diff --git a/.eslintrc b/.eslintrc index 077931cae..d8474e32a 100644 --- a/.eslintrc +++ b/.eslintrc @@ -11,6 +11,7 @@ "no-console": "off" }, "ignorePatterns": [ - "**/lib/*" + "**/lib/*", + "lib/*" ] } diff --git a/.github/workflows/l1-contracts-ci.yaml b/.github/workflows/l1-contracts-ci.yaml index 2123b47cb..964efaea8 100644 --- a/.github/workflows/l1-contracts-ci.yaml +++ b/.github/workflows/l1-contracts-ci.yaml @@ -3,6 +3,11 @@ name: L1 contracts CI on: pull_request: +# We need this permissions for this CI to work with external contributions +permissions: + contents: read + pull-requests: write + jobs: build: runs-on: ubuntu-latest @@ -17,21 +22,37 @@ jobs: node-version: 18.18.0 cache: yarn + - name: Use Foundry + uses: foundry-rs/foundry-toolchain@v1 + - name: Install dependencies run: yarn - - name: Build artifacts - run: yarn l1 build + - name: Install l2 deps + working-directory: ./l2-contracts + run: yarn + + - name: Install l1 deps + working-directory: ./l1-contracts + run: yarn - - name: Build L2 artifacts + - name: Build l2 artifacts run: yarn l2 build + - name: Build l1 artifacts + run: yarn l1 build + + - name: Build da-contracts artifacts + run: yarn da build:foundry + - name: Create cache uses: actions/cache/save@v3 with: key: artifacts-l1-${{ github.sha }} path: | + da-contracts/out l1-contracts/artifacts + l1-contracts/artifacts-zk l1-contracts/cache l1-contracts/typechain l2-contracts/artifacts-zk @@ -85,12 +106,69 @@ jobs: fail-on-cache-miss: true key: artifacts-l1-${{ github.sha }} path: | + da-contracts/out l1-contracts/artifacts + l1-contracts/artifacts-zk l1-contracts/cache l1-contracts/typechain + l2-contracts/artifacts-zk + l2-contracts/cache-zk + l2-contracts/typechain - name: Run tests - run: yarn l1 test:foundry + working-directory: ./l1-contracts + run: FOUNDRY_PROFILE=default yarn test:foundry + + test-foundry-zksync: + needs: [build, lint] + runs-on: ubuntu-latest + + steps: + - name: Checkout the repository + uses: 
actions/checkout@v4 + with: + submodules: recursive + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: 18.18.0 + cache: yarn + + - name: Install dependencies + run: yarn + + - name: Build system contract artifacts + run: yarn sc build + + - name: Restore artifacts cache + uses: actions/cache/restore@v3 + with: + fail-on-cache-miss: true + key: artifacts-l1-${{ github.sha }} + path: | + da-contracts/out + l1-contracts/artifacts + l1-contracts/artifacts-zk + l1-contracts/cache + l1-contracts/typechain + l2-contracts/artifacts-zk + l2-contracts/cache-zk + l2-contracts/typechain + + - name: Install foundry zksync + run: | + wget https://github.com/matter-labs/foundry-zksync/releases/download/nightly-f908ce43834bc1ffb4de6576ea5600eaab49dddb/foundry_nightly_linux_amd64.tar.gz -O foundry-zksync.tar.gz + tar -xzf foundry-zksync.tar.gz + sudo mv forge /usr/local/bin/forge + sudo mv cast /usr/local/bin/cast + sudo chmod +x /usr/local/bin/forge + sudo chmod +x /usr/local/bin/cast + forge --version + + - name: Run tests + working-directory: ./l1-contracts + run: FOUNDRY_PROFILE=default yarn test:zkfoundry test-hardhat: needs: [build, lint] @@ -109,23 +187,32 @@ jobs: - name: Install dependencies run: yarn + - name: Install l1 deps + working-directory: ./l1-contracts + run: yarn + - name: Restore artifacts cache uses: actions/cache/restore@v3 with: fail-on-cache-miss: true key: artifacts-l1-${{ github.sha }} path: | + da-contracts/out l1-contracts/artifacts + l1-contracts/artifacts-zk l1-contracts/cache l1-contracts/typechain l2-contracts/artifacts-zk l2-contracts/cache-zk l2-contracts/typechain + - name: Build L2 contracts + run: yarn l2 build + - name: Run tests run: yarn l1 test --no-compile - check-verifier-generator: + check-verifier-generator-l1: runs-on: ubuntu-latest steps: @@ -145,3 +232,131 @@ jobs: - name: Compare run: diff tools/data/Verifier.sol l1-contracts/contracts/state-transition/Verifier.sol + + coverage: + defaults: + run: + 
working-directory: l1-contracts + needs: [build, lint] + runs-on: ubuntu-latest + + steps: + - name: Checkout the repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Use Foundry + uses: foundry-rs/foundry-toolchain@v1 + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: 18.18.0 + cache: yarn + + - name: Install dependencies + run: yarn + + - name: Restore artifacts cache + uses: actions/cache/restore@v3 + with: + fail-on-cache-miss: true + key: artifacts-l1-${{ github.sha }} + path: | + da-contracts/out + l1-contracts/artifacts + l1-contracts/artifacts-zk + l1-contracts/cache + l1-contracts/typechain + l2-contracts/artifacts-zk + l2-contracts/cache-zk + l2-contracts/typechain + + - name: Run coverage + run: FOUNDRY_PROFILE=default yarn test:foundry && FOUNDRY_PROFILE=default yarn coverage:foundry --report summary --report lcov + + # To ignore coverage for certain directories modify the paths in this step as needed. The + # below default ignores coverage results for the test and script directories. Alternatively, + # to include coverage in all directories, comment out this step. Note that because this + # filtering applies to the lcov file, the summary table generated in the previous step will + # still include all files and directories. + # The `--rc lcov_branch_coverage=1` part keeps branch info in the filtered report, since lcov + # defaults to removing branch info. + - name: Filter directories + run: | + sudo apt update && sudo apt install -y lcov + lcov --remove lcov.info 'test/*' 'contracts/dev-contracts/*' '../lib/forge-std/*' '../lib/murky/*' 'lib/*' '../lib/*' 'lib/' 'deploy-scripts/*' --output-file lcov.info --rc lcov_branch_coverage=1 + + # This step posts a detailed coverage report as a comment and deletes previous comments on + # each push. The below step is used to fail coverage if the specified coverage threshold is + # not met. 
The below step can post a comment (when it's `github-token` is specified) but it's + # not as useful, and this action cannot fail CI based on a minimum coverage threshold, which + # is why we use both in this way. + - name: Post coverage report + if: github.event_name == 'pull_request' # This action fails when ran outside of a pull request. + uses: romeovs/lcov-reporter-action@v0.3.1 + with: + delete-old-comments: true + lcov-file: ./l1-contracts/lcov.info + github-token: ${{ secrets.GITHUB_TOKEN }} # Adds a coverage summary comment to the PR. + + - name: Verify minimum coverage + uses: zgosalvez/github-actions-report-lcov@v2 + with: + coverage-files: ./l1-contracts/lcov.info + working-directory: l1-contracts + minimum-coverage: 85 # Set coverage threshold. + + gas-report: + needs: [build, lint] + runs-on: ubuntu-latest + + steps: + - name: Checkout the repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Use Foundry + uses: foundry-rs/foundry-toolchain@v1 + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: 18.18.0 + cache: yarn + + - name: Install dependencies + run: yarn + + - name: Restore artifacts cache + uses: actions/cache/restore@v3 + with: + fail-on-cache-miss: true + key: artifacts-l1-${{ github.sha }} + path: | + l1-contracts/artifacts + l1-contracts/cache + l1-contracts/typechain + + # Add any step generating a gas report to a temporary file named gasreport.ansi. For example: + - name: Run tests + run: yarn l1 test:foundry --gas-report | tee gasreport.ansi # <- this file name should be unique in your repository! 
+ + - name: Compare gas reports + uses: Rubilmax/foundry-gas-diff@v3.18 + with: + summaryQuantile: 0.0 # only display the 10% most significant gas diffs in the summary (defaults to 20%) + sortCriteria: avg,max # sort diff rows by criteria + sortOrders: desc,asc # and directions + ignore: test-foundry/**/*,l1-contracts/contracts/dev-contracts/**/*,l1-contracts/lib/**/*,l1-contracts/contracts/common/Dependencies.sol + id: gas_diff + + - name: Add gas diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + # delete the comment in case changes no longer impact gas costs + delete: ${{ !steps.gas_diff.outputs.markdown }} + message: ${{ steps.gas_diff.outputs.markdown }} diff --git a/.github/workflows/l1-contracts-foundry-ci.yaml b/.github/workflows/l1-contracts-foundry-ci.yaml index 02c551d63..4d90553c9 100644 --- a/.github/workflows/l1-contracts-foundry-ci.yaml +++ b/.github/workflows/l1-contracts-foundry-ci.yaml @@ -28,6 +28,9 @@ jobs: - name: Install dependencies run: yarn + - name: Build hardhat artifacts + run: yarn l1 build + - name: Build artifacts working-directory: ./l1-contracts run: forge build @@ -35,13 +38,23 @@ jobs: - name: Build system-contract artifacts run: yarn sc build + - name: Build l2 artifacts + run: yarn l2 build + + - name: Build da-contracts artifacts + run: yarn da build:foundry + - name: Create cache uses: actions/cache/save@v3 with: key: artifacts-l1-contracts-foudry-${{ github.sha }} path: | + da-contracts/out l1-contracts/cache l1-contracts/out + l1-contracts/artifacts-zk + l2-contracts/artifacts-zk + l2-contracts/cache-zk system-contracts/artifacts-zk system-contracts/bootloader/build system-contracts/cache-zk @@ -63,8 +76,12 @@ jobs: fail-on-cache-miss: true key: artifacts-l1-contracts-foudry-${{ github.sha }} path: | + da-contracts/out l1-contracts/cache l1-contracts/out + l1-contracts/artifacts-zk + 
l2-contracts/artifacts-zk + l2-contracts/cache-zk system-contracts/artifacts-zk system-contracts/bootloader/build system-contracts/cache-zk @@ -76,7 +93,7 @@ jobs: - name: Copy configs from template working-directory: ./l1-contracts - run: cp -r deploy-script-config-template script-config + run: cp -r deploy-script-config-template/. script-config - name: Run anvil run: | @@ -107,11 +124,11 @@ jobs: working-directory: ./l1-contracts run: forge script ./deploy-scripts/DeployErc20.s.sol --ffi --rpc-url $ANVIL_RPC_URL --broadcast --private-key $ANVIL_PRIVATE_KEY # TODO restore scripts verification -# - name: Run RegisterHyperchain script +# - name: Run RegisterZKChain script # working-directory: ./l1-contracts # run: | -# cat ./script-out/output-deploy-l1.toml >> ./script-config/register-hyperchain.toml -# forge script ./deploy-scripts/RegisterHyperchain.s.sol --ffi --rpc-url $ANVIL_RPC_URL --broadcast --private-key $ANVIL_PRIVATE_KEY +# cat ./script-out/output-deploy-l1.toml >> ./script-config/register-zk-chain.toml +# forge script ./deploy-scripts/RegisterZKChain.s.sol --ffi --rpc-url $ANVIL_RPC_URL --broadcast --private-key $ANVIL_PRIVATE_KEY # - name: Run InitializeL2WethToken script # working-directory: ./l1-contracts-foundry # run: forge script ./deploy-scripts/InitializeL2WethToken.s.sol --ffi --rpc-url $ANVIL_RPC_URL --broadcast --private-key $ANVIL_PRIVATE_KEY diff --git a/.github/workflows/l2-contracts-ci.yaml b/.github/workflows/l2-contracts-ci.yaml index 20bb9583f..e7e4b9541 100644 --- a/.github/workflows/l2-contracts-ci.yaml +++ b/.github/workflows/l2-contracts-ci.yaml @@ -26,6 +26,9 @@ jobs: - name: Build L2 artifacts run: yarn l2 build + - name: Build system contract artifacts + run: yarn sc build + - name: Create cache uses: actions/cache/save@v3 with: @@ -37,6 +40,9 @@ jobs: l2-contracts/artifacts-zk l2-contracts/cache-zk l2-contracts/typechain + system-contracts/artifacts-zk + system-contracts/cache-zk + system-contracts/typechain lint: runs-on: 
ubuntu-latest @@ -57,6 +63,23 @@ jobs: - name: Lint run: yarn lint:check + check-verifier-generator-l2: + needs: [build] + runs-on: ubuntu-latest + + steps: + - name: Checkout the repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Generate Verifier.sol + working-directory: tools + run: cargo run --bin zksync_verifier_contract_generator --release -- --input_path data/scheduler_key.json --l2_mode + + - name: Compare + run: diff tools/data/Verifier.sol l2-contracts/contracts/verifier/Verifier.sol + test: needs: [build, lint] runs-on: ubuntu-latest @@ -88,9 +111,19 @@ jobs: l2-contracts/artifacts-zk l2-contracts/cache-zk l2-contracts/typechain - - - name: Run Era test node - uses: dutterbutter/era-test-node-action@v0.1.3 + system-contracts/artifacts-zk + system-contracts/cache-zk + system-contracts/typechain + + - name: Install foundry zksync + run: | + wget https://github.com/matter-labs/foundry-zksync/releases/download/nightly-f908ce43834bc1ffb4de6576ea5600eaab49dddb/foundry_nightly_linux_amd64.tar.gz -O foundry-zksync.tar.gz + tar -xzf foundry-zksync.tar.gz + sudo mv forge /usr/local/bin/forge + sudo mv cast /usr/local/bin/cast + sudo chmod +x /usr/local/bin/forge + sudo chmod +x /usr/local/bin/cast + forge --version - name: Run tests - run: yarn l2 test + run: yarn l2 test:foundry diff --git a/.gitignore b/.gitignore index 63f96063b..2128686e0 100644 --- a/.gitignore +++ b/.gitignore @@ -22,7 +22,16 @@ l1-contracts/lcov.info l1-contracts/report/* l1-contracts/coverage/* l1-contracts/out/* +l1-contracts/zkout/* l1-contracts/broadcast/* l1-contracts/script-config/* +!l1-contracts/script-config/artifacts l1-contracts/script-out/* +l1-contracts/test/foundry/l1/integration/deploy-scripts/script-out/*.toml !l1-contracts/script-out/.gitkeep +*.timestamp +l1-contracts/test/foundry/l1/integration/deploy-scripts/script-out/* +l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-zk-chain-*.toml 
+l1-contracts/test/foundry/integration/deploy-scripts/script-out/* +l1-contracts/test/foundry/l1/integration/upgrade-envs/script-out/*.toml +l1-contracts/zkout/* diff --git a/.gitmodules b/.gitmodules index 5cbc631ba..f94071e53 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,14 +1,17 @@ -[submodule "l1-contracts/lib/forge-std"] - path = l1-contracts/lib/forge-std - url = https://github.com/foundry-rs/forge-std -[submodule "l1-contracts/lib/murky"] - path = l1-contracts/lib/murky +[submodule "lib/murky"] + path = lib/murky url = https://github.com/dmfxyz/murky -[submodule "l1-contracts/lib/openzeppelin-contracts-upgradeable"] - path = l1-contracts/lib/openzeppelin-contracts-upgradeable +[submodule "lib/openzeppelin-contracts-upgradeable-v4"] + path = lib/openzeppelin-contracts-upgradeable-v4 url = https://github.com/Openzeppelin/openzeppelin-contracts-upgradeable branch = release-v4.9 -[submodule "l1-contracts/lib/openzeppelin-contracts"] - path = l1-contracts/lib/openzeppelin-contracts +[submodule "lib/openzeppelin-contracts-v4"] + path = lib/openzeppelin-contracts-v4 url = https://github.com/Openzeppelin/openzeppelin-contracts branch = release-v4.9 +[submodule "lib/forge-std"] + path = lib/forge-std + url = https://github.com/foundry-rs/forge-std +[submodule "lib/@matterlabs/zksync-contracts"] + path = lib/@matterlabs/zksync-contracts + url = https://github.com/matter-labs/v2-testnet-contracts diff --git a/.markdownlintignore b/.markdownlintignore index 5abdcbeb3..cffb39f9c 100644 --- a/.markdownlintignore +++ b/.markdownlintignore @@ -2,7 +2,6 @@ node_modules # l1-contracts -l1-contracts/lib l1-contracts/node_modules # l1-contracts-foundry @@ -14,3 +13,8 @@ l2-contracts/node_modules # system-contracts system-contracts/node_modules system-contracts/bootloader/test_infra/target + +l1-contracts/lib +lib/ +l2-contracts/lib +system-contracts/lib diff --git a/.prettierignore b/.prettierignore index 5bc4f9aa7..0c2a4c4dc 100644 --- a/.prettierignore +++ 
b/.prettierignore @@ -1,6 +1,7 @@ tools/data l1-contracts/lib l1-contracts-foundry/lib +lib system-contracts/contracts/openzeppelin system-contracts/contracts/Constants.sol system-contracts/artifacts-zk @@ -10,3 +11,6 @@ l1-contracts/cache l1-contracts/cache-forge l1-contracts/artifacts l1-contracts/artifacts-forge +l1-contracts/zkout +l2-contracts/zkout +system-contracts/zkout diff --git a/.solhint.json b/.solhint.json index 617a892bf..ef3522c45 100644 --- a/.solhint.json +++ b/.solhint.json @@ -1,31 +1,40 @@ { "extends": "solhint:recommended", "rules": { - "state-visibility": "off", - "func-visibility": ["warn", { "ignoreConstructors": true }], - "var-name-mixedcase": "off", - "avoid-call-value": "off", - "no-empty-blocks": "off", - "not-rely-on-time": "off", + "avoid-call-value": "error", "avoid-low-level-calls": "off", - "no-inline-assembly": "off", + "avoid-sha3": "error", + "check-send-result": "error", + "compiler-version": ["error", "^0.8.0"], "const-name-snakecase": "off", - "no-complex-fallback": "off", - "reason-string": "off", + "contract-name-camelcase": "off", + "gas-calldata-parameters": "error", + "gas-custom-errors": "error", + "gas-increment-by-one": "error", + "gas-length-in-loops": "error", + "gas-struct-packing": "error", + "explicit-types": "error", "func-name-mixedcase": "off", - "custom-errors": "off", - "no-unused-vars": "error", + "func-named-parameters": ["error", 4], + "func-visibility": ["error", { "ignoreConstructors": true }], + "imports-on-top": "error", "max-states-count": "off", + "modifier-name-mixedcase": "error", + "named-parameters-mapping": "off", + "no-complex-fallback": "off", + "no-console": "error", + "no-empty-blocks": "off", "no-global-import": "error", + "no-inline-assembly": "off", "no-unused-import": "error", - "explicit-types": "error", - "modifier-name-mixedcase": "error", - "imports-on-top": "error", + "no-unused-vars": "error", + "not-rely-on-time": "off", "quotes": "error", - "use-forbidden-name": "error", - 
"visibility-modifier-order": "error", + "reason-string": "error", "reentrancy": "error", - "func-named-parameters": ["error", 4], - "compiler-version": ["error", "^0.8.0"] + "state-visibility": "error", + "use-forbidden-name": "error", + "var-name-mixedcase": "off", + "visibility-modifier-order": "error" } } diff --git a/.solhintignore b/.solhintignore index ec8271f7e..7ba03ff3e 100644 --- a/.solhintignore +++ b/.solhintignore @@ -6,6 +6,9 @@ l1-contracts/cache l1-contracts/cache-forge l1-contracts/lib l1-contracts/node_modules +l1-contracts/contracts/dev-contracts +l1-contracts/test +l1-contracts/deploy-scripts # l1-contracts-foundry l1-contracts-foundry/cache @@ -14,7 +17,18 @@ l1-contracts-foundry/lib # l2-contracts l2-contracts/cache-zk l2-contracts/node_modules +l2-contracts/contracts/dev-contracts +l2-contracts/test # system-contracts system-contracts/contracts/openzeppelin system-contracts/contracts/Constants.sol +system-contracts/contracts/test-contracts +system-contracts/contracts-preprocessed + +# gas-bound-caller +gas-bound-caller + +lib/* +l2-contracts/lib +system-contracts/lib diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index dd3d45842..46bdeebac 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -34,7 +34,7 @@ We aim to make it as easy as possible to contribute to the mission. This is stil and suggestions here too. Some resources to help: 1. [In-repo docs aimed at developers](docs) -2. [zkSync Era docs!](https://era.zksync.io/docs/) +2. [ZKsync Era docs!](https://era.zksync.io/docs/) 3. 
Company links can be found in the [repo's readme](README.md) ## Code of Conduct diff --git a/README.md b/README.md index 6e3e06aba..cc1425b5b 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@ -# zkSync Era: Smart Contracts +# ZKsync Era: Smart Contracts [![Logo](eraLogo.svg)](https://zksync.io/) -zkSync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or +ZKsync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or decentralization. Since it's EVM compatible (Solidity/Vyper), 99% of Ethereum projects can redeploy without refactoring -or re-auditing a single line of code. zkSync Era also uses an LLVM-based compiler that will eventually let developers +or re-auditing a single line of code. ZKsync Era also uses an LLVM-based compiler that will eventually let developers write smart contracts in C++, Rust and other popular languages. -This repository contains both L1 and L2 zkSync smart contracts. For their description see the +This repository contains both L1 and L2 ZKsync smart contracts. For their description see the [system overview](docs/Overview.md). ## Disclaimer @@ -17,7 +17,7 @@ others may not. ## License -zkSync Era contracts are distributed under the terms of the MIT license. +ZKsync Era contracts are distributed under the terms of the MIT license. See [LICENSE-MIT](LICENSE-MIT) for details. @@ -33,7 +33,7 @@ See [LICENSE-MIT](LICENSE-MIT) for details. ## Disclaimer -zkSync Era has been through lots of testing and audits. Although it is live, it is still in alpha state and will go +ZKsync Era has been through lots of testing and audits. Although it is live, it is still in alpha state and will go through more audits and bug bounties programs. We would love to hear our community's thoughts and suggestions about it! 
It is important to state that forking it now can potentially lead to missing important security updates, critical features, and performance improvements. diff --git a/_typos.toml b/_typos.toml index 54e99fdf1..b2c7c85c8 100644 --- a/_typos.toml +++ b/_typos.toml @@ -5,6 +5,7 @@ extend-exclude = [ "/l1-contracts/out/", "/l1-contracts/node_modules/", "/l1-contracts/artifacts", + "/l1-contracts-foundry/lib/", "/l2-contracts/artifacts-zk", "/l2-contracts/cache-zk", "/l2-contracts/typechain", diff --git a/change.txt b/change.txt new file mode 100644 index 000000000..c97c9f07f --- /dev/null +++ b/change.txt @@ -0,0 +1 @@ +solpp/=cache/solpp-generated-contracts/ diff --git a/contracts-review-prep.md b/contracts-review-prep.md new file mode 100644 index 000000000..883362f7c --- /dev/null +++ b/contracts-review-prep.md @@ -0,0 +1,65 @@ +# Contracts Review Preparation + +## High-level Overview + +### Reason for changes + +The goal was to be build a foundation to be able to support token bridging with custom logic on receiving chain (not wrapped), as well as custom bridging logic (assets, which accrue value over time, like LRTs). +For clarity, we only developed a framework, the exact logic for custom tokens and custom bridging will follow. + +### Major changes + +In order to achieve it, we separated the liquidity managing logic from the Shared Bridge to `Asset Handlers`. The basic cases will be handled by `Native Token Vaults`, which are handling all of the standard `ERC20 tokens`, as well as `ETH`. 
+ +### New concepts + +- assetHandler => contract that manages liquidity (burns/mints, locks/unlocks) for specific token (or a set of them) +- assetId => identifier to track bridged assets across chains linked to specific asset handler + +## Known Issues + +### storage layout + +L2SharedBridge will be a system contract, L2NativeTokenVault will replace it (the storage layout is still not yet backwards compatible) + +### bridgehubDeposit API change + +> /// @notice Allows bridgehub to acquire mintValue for L1->L2 transactions. + + /// @dev If the corresponding L2 transaction fails, refunds are issued to a refund recipient on L2. + function bridgehubDepositBaseToken( + uint256 _chainId, + bytes32 _assetId, + +Note, that the new SB is not compatible with both: + +- old Mailbox on Era +- old Bridgehub +- And vice versa. + +We need to either: + +- ensure that all 3 get upgraded at the same time. Upgrading BH and SB at the same time is feasible IMHO. But upgrading DP (Mailbox) in the same transaction may not be. +- have a concrete plan for such case. E.g. explicitly tell that the legacy deposits will stop working. In this case we need to check (at the very least visually) that the worst thing that can happen is just deposits not working and not some funds lost +- Add the corresponding legacy functions + This text you see here is \*actually- written in Markdown! To get a feel + for Markdown's syntax, type some text into the left window and + watch the results in the right. + +### not allowing legacy withdrawals + +> require(!\_isEraLegacyEthWithdrawal(\_chainId, \_l2BatchNumber), "ShB: legacy eth withdrawal"); + +No method to finalize an old withdrawal. +We will manually finalize all legacy withdrawals before the upgrade, i.e. withdrawals that happened before the previous Bridgehub upgrade. 
+ +### Custom Errors not implemented + +> require(expectedDepositAmount == \_depositAmount, "3T"); // The token has non-standard transfer logic + +Custom errors will be introduced for all contracts. + +## Migration plan + +- Bulkheads will need to be migrated (methods added) +- Tokens will have to be transferred (methods added) diff --git a/da-contracts/.env b/da-contracts/.env new file mode 100644 index 000000000..59a2db08b --- /dev/null +++ b/da-contracts/.env @@ -0,0 +1,2 @@ +CHAIN_ETH_NETWORK=hardhat +ETH_CLIENT_WEB3_URL=http://127.0.0.1:8545 diff --git a/da-contracts/contracts/CalldataDA.sol b/da-contracts/contracts/CalldataDA.sol new file mode 100644 index 000000000..ffb666f5f --- /dev/null +++ b/da-contracts/contracts/CalldataDA.sol @@ -0,0 +1,111 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-custom-errors, reason-string + +/// @dev Total number of bytes in a blob. Blob = 4096 field elements * 31 bytes per field element +/// @dev EIP-4844 defines it as 131_072 but we use 4096 * 31 within our circuits to always fit within a field element +/// @dev Our circuits will prove that a EIP-4844 blob and our internal blob are the same. +uint256 constant BLOB_SIZE_BYTES = 126_976; + +/// @dev The state diff hash, hash of pubdata + the number of blobs. +uint256 constant BLOB_DATA_OFFSET = 65; + +/// @dev The size of the commitment for a single blob. +uint256 constant BLOB_COMMITMENT_SIZE = 32; + +/// @notice Contract that contains the functionality for process the calldata DA. +/// @dev The expected l2DAValidator that should be used with it `RollupL2DAValidator`. +abstract contract CalldataDA { + /// @notice Parses the input that the L2 DA validator has provided to the contract. + /// @param _l2DAValidatorOutputHash The hash of the output of the L2 DA validator. + /// @param _maxBlobsSupported The maximal number of blobs supported by the chain. + /// @param _operatorDAInput The DA input by the operator provided on L1. 
+ function _processL2RollupDAValidatorOutputHash( + bytes32 _l2DAValidatorOutputHash, + uint256 _maxBlobsSupported, + bytes calldata _operatorDAInput + ) + internal + pure + returns ( + bytes32 stateDiffHash, + bytes32 fullPubdataHash, + bytes32[] memory blobsLinearHashes, + uint256 blobsProvided, + bytes calldata l1DaInput + ) + { + // The preimage under the hash `_l2DAValidatorOutputHash` is expected to be in the following format: + // - First 32 bytes are the hash of the uncompressed state diff. + // - Then, there is a 32-byte hash of the full pubdata. + // - Then, there is the 1-byte number of blobs published. + // - Then, there are linear hashes of the published blobs, 32 bytes each. + + // Check that it accommodates enough pubdata for the state diff hash, hash of pubdata + the number of blobs. + require(_operatorDAInput.length >= BLOB_DATA_OFFSET, "too small"); + + stateDiffHash = bytes32(_operatorDAInput[:32]); + fullPubdataHash = bytes32(_operatorDAInput[32:64]); + blobsProvided = uint256(uint8(_operatorDAInput[64])); + + require(blobsProvided <= _maxBlobsSupported, "invalid number of blobs"); + + // Note that the API of the contract requires that the returned blobs linear hashes have length of + // the `_maxBlobsSupported` + blobsLinearHashes = new bytes32[](_maxBlobsSupported); + + require(_operatorDAInput.length >= BLOB_DATA_OFFSET + 32 * blobsProvided, "invalid blobs hashes"); + + _cloneCalldata(blobsLinearHashes, _operatorDAInput[BLOB_DATA_OFFSET:], blobsProvided); + + uint256 ptr = BLOB_DATA_OFFSET + 32 * blobsProvided; + + // Now, we need to double check that the provided input was indeed returned by the L2 DA validator. + require(keccak256(_operatorDAInput[:ptr]) == _l2DAValidatorOutputHash, "invalid l2 DA output hash"); + + // The rest of the output was provided specifically by the operator + l1DaInput = _operatorDAInput[ptr:]; + } + + /// @notice Verify that the calldata DA was correctly provided. 
+ /// @param _blobsProvided The number of blobs provided. + /// @param _fullPubdataHash Hash of the pubdata preimage. + /// @param _maxBlobsSupported Maximum number of blobs supported. + /// @param _pubdataInput Full pubdata + an additional 32 bytes containing the blob commitment for the pubdata. + /// @dev We supply the blob commitment as part of the pubdata because even with calldata the prover will check these values. + function _processCalldataDA( + uint256 _blobsProvided, + bytes32 _fullPubdataHash, + uint256 _maxBlobsSupported, + bytes calldata _pubdataInput + ) internal pure virtual returns (bytes32[] memory blobCommitments, bytes calldata _pubdata) { + require(_blobsProvided == 1, "one blob with calldata"); + require(_pubdataInput.length >= BLOB_COMMITMENT_SIZE, "pubdata too small"); + + // We typically do not know whether we'll use calldata or blobs at the time when + // we start proving the batch. That's why the blob commitment for a single blob is still present in the case of calldata. + + blobCommitments = new bytes32[](_maxBlobsSupported); + + _pubdata = _pubdataInput[:_pubdataInput.length - BLOB_COMMITMENT_SIZE]; + + require(_pubdata.length <= BLOB_SIZE_BYTES, "cz"); + require(_fullPubdataHash == keccak256(_pubdata), "wp"); + blobCommitments[0] = bytes32(_pubdataInput[_pubdataInput.length - BLOB_COMMITMENT_SIZE:_pubdataInput.length]); + } + + /// @notice Method that clones a slice of calldata into a bytes32[] memory array. + /// @param _dst The destination array. + /// @param _input The input calldata. + /// @param _len The length of the slice in 32-byte words to clone. + function _cloneCalldata(bytes32[] memory _dst, bytes calldata _input, uint256 _len) internal pure { + assembly { + // The pointer to the allocated memory above. We skip 32 bytes to avoid overwriting the length. 
+ let dstPtr := add(_dst, 0x20) + let inputPtr := _input.offset + calldatacopy(dstPtr, inputPtr, mul(_len, 32)) + } + } +} diff --git a/da-contracts/contracts/DAContractsErrors.sol b/da-contracts/contracts/DAContractsErrors.sol new file mode 100644 index 000000000..2116d582d --- /dev/null +++ b/da-contracts/contracts/DAContractsErrors.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.21; + +// 0x53dee67b +error PubdataCommitmentsEmpty(); +// 0x7734c31a +error PubdataCommitmentsTooBig(); +// 0x53e6d04d +error InvalidPubdataCommitmentsSize(); +// 0xafd53e2f +error BlobHashCommitmentError(uint256 index, bool blobHashEmpty, bool blobCommitmentEmpty); +// 0xfc7ab1d3 +error EmptyBlobVersionHash(uint256 index); +// 0x92290acc +error NonEmptyBlobVersionHash(uint256 index); +// 0x8d5851de +error PointEvalCallFailed(bytes); +// 0x4daa985d +error PointEvalFailed(bytes); diff --git a/da-contracts/contracts/DAUtils.sol b/da-contracts/contracts/DAUtils.sol new file mode 100644 index 000000000..f79e609e9 --- /dev/null +++ b/da-contracts/contracts/DAUtils.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @dev Total number of bytes in a blob. Blob = 4096 field elements * 31 bytes per field element +/// @dev EIP-4844 defines it as 131_072 but we use 4096 * 31 within our circuits to always fit within a field element +/// @dev Our circuits will prove that a EIP-4844 blob and our internal blob are the same. +uint256 constant BLOB_SIZE_BYTES = 126_976; + +/// @dev Enum used to determine the source of pubdata. At first we will support calldata and blobs but this can be extended. +enum PubdataSource { + Calldata, + Blob +} + +/// @dev BLS Modulus value defined in EIP-4844 and the magic value returned from a successful call to the +/// point evaluation precompile +uint256 constant BLS_MODULUS = 52435875175126190479447740508185965837690552500527637822603658699938581184513; + +/// @dev Packed pubdata commitments. 
+/// @dev Format: list of: opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes)) = 144 bytes +uint256 constant PUBDATA_COMMITMENT_SIZE = 144; + +/// @dev Offset in pubdata commitment of blobs for claimed value +uint256 constant PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET = 16; + +/// @dev Offset in pubdata commitment of blobs for kzg commitment +uint256 constant PUBDATA_COMMITMENT_COMMITMENT_OFFSET = 48; + +/// @dev For each blob we expect that the commitment is provided as well as the marker whether a blob with such commitment has been published before. +uint256 constant BLOB_DA_INPUT_SIZE = PUBDATA_COMMITMENT_SIZE + 32; + +/// @dev Address of the point evaluation precompile used for EIP-4844 blob verification. +address constant POINT_EVALUATION_PRECOMPILE_ADDR = address(0x0A); + +/// @dev The address of the special smart contract that can send arbitrary length message as an L2 log +address constant L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR = address(0x8008); diff --git a/da-contracts/contracts/IL1DAValidator.sol b/da-contracts/contracts/IL1DAValidator.sol new file mode 100644 index 000000000..c22e9c557 --- /dev/null +++ b/da-contracts/contracts/IL1DAValidator.sol @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +struct L1DAValidatorOutput { + /// @dev The hash of the uncompressed state diff. + bytes32 stateDiffHash; + /// @dev The hashes of the blobs on L1. The array is dynamic to account for forward compatibility. + /// The length of it must be equal to `maxBlobsSupported`. + bytes32[] blobsLinearHashes; + /// @dev The commitments to the blobs on L1. The array is dynamic to account for forward compatibility. + /// Its length must be equal to the length of blobsLinearHashes. + /// @dev If the system supports more blobs than returned, the rest of the array should be filled with zeros. 
+ bytes32[] blobsOpeningCommitments; +} + +interface IL1DAValidator { + /// @notice The function that checks the data availability for the given batch input. + /// @param _chainId The chain id of the chain that is being committed. + /// @param _chainId The batch number for which the data availability is being checked. + /// @param _l2DAValidatorOutputHash The hash of that was returned by the l2DAValidator. + /// @param _operatorDAInput The DA input by the operator provided on L1. + /// @param _maxBlobsSupported The maximal number of blobs supported by the chain. + /// We provide this value for future compatibility. + /// This is needed because the corresponding `blobsLinearHashes`/`blobsOpeningCommitments` + /// in the `L1DAValidatorOutput` struct will have to have this length as it is required + /// to be static by the circuits. + function checkDA( + uint256 _chainId, + uint256 _batchNumber, + bytes32 _l2DAValidatorOutputHash, + bytes calldata _operatorDAInput, + uint256 _maxBlobsSupported + ) external returns (L1DAValidatorOutput memory output); +} diff --git a/da-contracts/contracts/IL1Messenger.sol b/da-contracts/contracts/IL1Messenger.sol new file mode 100644 index 000000000..f0557487b --- /dev/null +++ b/da-contracts/contracts/IL1Messenger.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice The interface of the L1 Messenger contract, responsible for sending messages to L1. 
+ */ +interface IL1Messenger { + function sendToL1(bytes memory _message) external returns (bytes32); +} diff --git a/da-contracts/contracts/RollupL1DAValidator.sol b/da-contracts/contracts/RollupL1DAValidator.sol new file mode 100644 index 000000000..99a57a7c3 --- /dev/null +++ b/da-contracts/contracts/RollupL1DAValidator.sol @@ -0,0 +1,197 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-custom-errors, reason-string + +import {IL1DAValidator, L1DAValidatorOutput} from "./IL1DAValidator.sol"; + +import {CalldataDA} from "./CalldataDA.sol"; + +import {PubdataSource, BLS_MODULUS, PUBDATA_COMMITMENT_SIZE, PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET, PUBDATA_COMMITMENT_COMMITMENT_OFFSET, BLOB_DA_INPUT_SIZE, POINT_EVALUATION_PRECOMPILE_ADDR} from "./DAUtils.sol"; + +import {PubdataCommitmentsEmpty, InvalidPubdataCommitmentsSize, BlobHashCommitmentError, EmptyBlobVersionHash, NonEmptyBlobVersionHash, PointEvalCallFailed, PointEvalFailed} from "./DAContractsErrors.sol"; + +uint256 constant BLOBS_SUPPORTED = 6; + +contract RollupL1DAValidator is IL1DAValidator, CalldataDA { + /// @dev The published blob commitments. Note, that the correctness of blob commitment with relation to the linear hash + /// is *not* checked in this contract, but is expected to be checked at the verification stage of the ZK contract. + mapping(bytes32 blobCommitment => bool isPublished) public publishedBlobCommitments; + + /// @notice Publishes certain blobs, marking commitments to them as published. + /// @param _pubdataCommitments The commitments to the blobs to be published. 
+ /// `_pubdataCommitments` is a packed list of commitments of the following format: + /// opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes) + function publishBlobs(bytes calldata _pubdataCommitments) external { + if (_pubdataCommitments.length == 0) { + revert PubdataCommitmentsEmpty(); + } + if (_pubdataCommitments.length % PUBDATA_COMMITMENT_SIZE != 0) { + revert InvalidPubdataCommitmentsSize(); + } + + uint256 versionedHashIndex = 0; + // solhint-disable-next-line gas-length-in-loops + for (uint256 i = 0; i < _pubdataCommitments.length; i += PUBDATA_COMMITMENT_SIZE) { + bytes32 blobCommitment = _getPublishedBlobCommitment( + versionedHashIndex, + _pubdataCommitments[i:i + PUBDATA_COMMITMENT_SIZE] + ); + publishedBlobCommitments[blobCommitment] = true; + ++versionedHashIndex; + } + } + + /// @inheritdoc IL1DAValidator + function checkDA( + uint256, // _chainId + uint256, // _batchNumber + bytes32 _l2DAValidatorOutputHash, + bytes calldata _operatorDAInput, + uint256 _maxBlobsSupported + ) external view returns (L1DAValidatorOutput memory output) { + ( + bytes32 stateDiffHash, + bytes32 fullPubdataHash, + bytes32[] memory blobsLinearHashes, + uint256 blobsProvided, + bytes calldata l1DaInput + ) = _processL2RollupDAValidatorOutputHash(_l2DAValidatorOutputHash, _maxBlobsSupported, _operatorDAInput); + + uint8 pubdataSource = uint8(l1DaInput[0]); + bytes32[] memory blobCommitments; + + if (pubdataSource == uint8(PubdataSource.Blob)) { + blobCommitments = _processBlobDA(blobsProvided, _maxBlobsSupported, l1DaInput[1:]); + } else if (pubdataSource == uint8(PubdataSource.Calldata)) { + (blobCommitments, ) = _processCalldataDA(blobsProvided, fullPubdataHash, _maxBlobsSupported, l1DaInput[1:]); + } else { + revert("l1-da-validator/invalid-pubdata-source"); + } + + // We verify that for each set of blobHash/blobCommitment are either both empty + // or there are values for both. 
+ // This is mostly a sanity check and it is not strictly required. + for (uint256 i = 0; i < _maxBlobsSupported; ++i) { + if ( + (blobsLinearHashes[i] == bytes32(0) && blobCommitments[i] != bytes32(0)) || + (blobsLinearHashes[i] != bytes32(0) && blobCommitments[i] == bytes32(0)) + ) { + revert BlobHashCommitmentError(i, blobsLinearHashes[i] == bytes32(0), blobCommitments[i] == bytes32(0)); + } + } + + output.stateDiffHash = stateDiffHash; + output.blobsLinearHashes = blobsLinearHashes; + output.blobsOpeningCommitments = blobCommitments; + } + + /// @notice Generated the blob commitemnt to be used in the cryptographic proof by calling the point evaluation precompile. + /// @param _index The index of the blob in this transaction. + /// @param _commitment The packed: opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes)) = 144 bytes + /// @return The commitment to be used in the cryptographic proof. + function _getPublishedBlobCommitment(uint256 _index, bytes calldata _commitment) internal view returns (bytes32) { + bytes32 blobVersionedHash = _getBlobVersionedHash(_index); + + if (blobVersionedHash == bytes32(0)) { + revert EmptyBlobVersionHash(_index); + } + + // First 16 bytes is the opening point. While we get the point as 16 bytes, the point evaluation precompile + // requires it to be 32 bytes. The blob commitment must use the opening point as 16 bytes though. + bytes32 openingPoint = bytes32( + uint256(uint128(bytes16(_commitment[:PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET]))) + ); + + _pointEvaluationPrecompile( + blobVersionedHash, + openingPoint, + _commitment[PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET:PUBDATA_COMMITMENT_SIZE] + ); + + // Take the hash of the versioned hash || opening point || claimed value + return keccak256(abi.encodePacked(blobVersionedHash, _commitment[:PUBDATA_COMMITMENT_COMMITMENT_OFFSET])); + } + + /// @notice Verify that the blob DA was correctly provided. 
+ /// @param _blobsProvided The number of blobs provided. + /// @param _maxBlobsSupported Maximum number of blobs supported. + /// @param _operatorDAInput Input used to verify that the blobs contain the data we expect. + function _processBlobDA( + uint256 _blobsProvided, + uint256 _maxBlobsSupported, + bytes calldata _operatorDAInput + ) internal view returns (bytes32[] memory blobsCommitments) { + blobsCommitments = new bytes32[](_maxBlobsSupported); + + // For blobs we expect to receive the commitments in the following format: + // 144 bytes for commitment data + // 32 bytes for the prepublished commitment. If it is non-zero, it means that it is expected that + // such commitment was published before. Otherwise, it is expected that it is published in this transaction + if (_operatorDAInput.length != _blobsProvided * BLOB_DA_INPUT_SIZE) { + revert InvalidPubdataCommitmentsSize(); + } + + uint256 versionedHashIndex = 0; + + // we iterate over the `_operatorDAInput`, while advancing the pointer by `BLOB_DA_INPUT_SIZE` each time + for (uint256 i = 0; i < _blobsProvided; ++i) { + bytes calldata commitmentData = _operatorDAInput[:PUBDATA_COMMITMENT_SIZE]; + bytes32 prepublishedCommitment = bytes32( + _operatorDAInput[PUBDATA_COMMITMENT_SIZE:PUBDATA_COMMITMENT_SIZE + 32] + ); + + if (prepublishedCommitment != bytes32(0)) { + // We double check that this commitment has indeed been published. + // If that is the case, we do not care about the actual underlying data. + require(publishedBlobCommitments[prepublishedCommitment], "not published"); + + blobsCommitments[i] = prepublishedCommitment; + } else { + blobsCommitments[i] = _getPublishedBlobCommitment(versionedHashIndex, commitmentData); + ++versionedHashIndex; + } + + // Advance the pointer + _operatorDAInput = _operatorDAInput[BLOB_DA_INPUT_SIZE:]; + } + + // This check is required because we want to ensure that there aren't any extra blobs trying to be published. 
+ // Calling the BLOBHASH opcode with an index > # blobs - 1 yields bytes32(0) + bytes32 versionedHash = _getBlobVersionedHash(versionedHashIndex); + if (versionedHash != bytes32(0)) { + revert NonEmptyBlobVersionHash(versionedHashIndex); + } + } + + /// @notice Calls the point evaluation precompile and verifies the output + /// Verify p(z) = y given commitment that corresponds to the polynomial p(x) and a KZG proof. + /// Also verify that the provided commitment matches the provided versioned_hash. + /// + function _pointEvaluationPrecompile( + bytes32 _versionedHash, + bytes32 _openingPoint, + bytes calldata _openingValueCommitmentProof + ) internal view { + bytes memory precompileInput = abi.encodePacked(_versionedHash, _openingPoint, _openingValueCommitmentProof); + + (bool success, bytes memory data) = POINT_EVALUATION_PRECOMPILE_ADDR.staticcall(precompileInput); + + // We verify that the point evaluation precompile call was successful by testing the latter 32 bytes of the + // response is equal to BLS_MODULUS as defined in https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile + if (!success) { + revert PointEvalCallFailed(precompileInput); + } + (, uint256 result) = abi.decode(data, (uint256, uint256)); + if (result != BLS_MODULUS) { + revert PointEvalFailed(abi.encode(result)); + } + } + + function _getBlobVersionedHash(uint256 _index) internal view virtual returns (bytes32 versionedHash) { + assembly { + versionedHash := blobhash(_index) + } + } +} diff --git a/da-contracts/foundry.toml b/da-contracts/foundry.toml new file mode 100644 index 000000000..6f29a31cc --- /dev/null +++ b/da-contracts/foundry.toml @@ -0,0 +1,32 @@ +[profile.default] +src = 'contracts' +out = 'out' +libs = ['node_modules', 'lib'] +remappings = [ + "@openzeppelin/contracts/=lib/openzeppelin-contracts/contracts/", + "@openzeppelin/contracts-upgradeable/=lib/openzeppelin-contracts-upgradeable/contracts/", + "l2-contracts/=../l2-contracts/contracts/" +] +allow_paths = 
["../l2-contracts/contracts"] +fs_permissions = [ + { access = "read", path = "../system-contracts/bootloader/build/artifacts" }, + { access = "read", path = "../system-contracts/artifacts-zk/contracts-preprocessed" }, + { access = "read", path = "../l2-contracts/artifacts-zk/" }, + { access = "read", path = "./script-config" }, + { access = "read-write", path = "./script-out" }, + { access = "read", path = "./out" } +] +cache_path = 'cache-forge' +test = 'test/foundry' +solc_version = "0.8.24" +evm_version = "cancun" +ignored_error_codes = [ + "missing-receive-ether", + "code-size", +] +ignored_warnings_from = [ + "test", + "contracts/dev-contracts" +] + +# See more config options https://github.com/foundry-rs/foundry/tree/master/crates/config diff --git a/da-contracts/hardhat.config.ts b/da-contracts/hardhat.config.ts new file mode 100644 index 000000000..884dc43d3 --- /dev/null +++ b/da-contracts/hardhat.config.ts @@ -0,0 +1,57 @@ +import "@nomiclabs/hardhat-ethers"; +import "@nomiclabs/hardhat-etherscan"; +import "@nomiclabs/hardhat-waffle"; +import "hardhat-contract-sizer"; +import "hardhat-gas-reporter"; +import "hardhat-typechain"; +import "solidity-coverage"; + +// If no network is specified, use the default config +if (!process.env.CHAIN_ETH_NETWORK) { + // eslint-disable-next-line @typescript-eslint/no-var-requires + require("dotenv").config(); +} + +export default { + defaultNetwork: "env", + solidity: { + version: "0.8.24", + settings: { + optimizer: { + enabled: true, + runs: 9999999, + }, + outputSelection: { + "*": { + "*": ["storageLayout"], + }, + }, + evmVersion: "cancun", + }, + }, + contractSizer: { + runOnCompile: false, + except: ["dev-contracts", "zksync/libraries", "common/libraries"], + }, + paths: { + sources: "./contracts", + }, + networks: { + env: { + url: process.env.ETH_CLIENT_WEB3_URL?.split(",")[0], + }, + hardhat: { + allowUnlimitedContractSize: false, + forking: { + url: "https://eth-goerli.g.alchemy.com/v2/" + 
process.env.ALCHEMY_KEY, + enabled: process.env.TEST_CONTRACTS_FORK === "1", + }, + }, + }, + etherscan: { + apiKey: process.env.MISC_ETHERSCAN_API_KEY, + }, + gasReporter: { + enabled: true, + }, +}; diff --git a/da-contracts/package.json b/da-contracts/package.json new file mode 100644 index 000000000..ec4ca3b45 --- /dev/null +++ b/da-contracts/package.json @@ -0,0 +1,66 @@ +{ + "name": "da-contracts", + "version": "0.1.0", + "license": "MIT", + "engines": { + "node": ">=16" + }, + "devDependencies": { + "@nomiclabs/hardhat-ethers": "^2.0.0", + "@nomiclabs/hardhat-etherscan": "^3.1.0", + "@nomiclabs/hardhat-waffle": "^2.0.0", + "@openzeppelin/contracts": "4.9.5", + "@openzeppelin/contracts-upgradeable": "4.9.5", + "@typechain/ethers-v5": "^2.0.0", + "@types/argparse": "^1.0.36", + "@types/chai": "^4.2.21", + "@types/chai-as-promised": "^7.1.4", + "@types/mocha": "^8.2.3", + "argparse": "^1.0.10", + "axios": "^0.21.1", + "chai": "^4.3.10", + "chai-as-promised": "^7.1.1", + "chalk": "^4.1.0", + "collections": "^5.1.12", + "commander": "^8.3.0", + "eslint": "^8.51.0", + "eslint-import-resolver-typescript": "^3.6.1", + "eslint-plugin-import": "^2.29.0", + "eslint-plugin-prettier": "^5.0.1", + "ethereum-waffle": "^4.0.10", + "ethereumjs-abi": "^0.6.8", + "ethers": "^5.7.0", + "ethjs": "^0.4.0", + "fs": "^0.0.1-security", + "handlebars": "^4.7.6", + "hardhat": "=2.22.2", + "hardhat-contract-sizer": "^2.0.2", + "hardhat-gas-reporter": "^1.0.9", + "hardhat-typechain": "^0.3.3", + "jsonwebtoken": "^8.5.1", + "markdownlint-cli": "^0.33.0", + "merkletreejs": "^0.3.11", + "mocha": "^9.0.2", + "path": "^0.12.7", + "querystring": "^0.2.0", + "solc": "0.8.17", + "solhint": "^3.6.2", + "solidity-coverage": "^0.8.5", + "ts-generator": "^0.1.1", + "ts-node": "^10.1.0", + "typechain": "^4.0.0", + "typescript": "^4.6.4", + "zksync-ethers": "5.8.0-beta.5" + }, + "scripts": { + "build": "hardhat compile ", + "build:foundry": "forge build", + "clean": "hardhat clean", + 
"clean:foundry": "forge clean", + "verify": "hardhat run --network env scripts/verify.ts" + }, + "dependencies": { + "dotenv": "^16.0.3", + "solhint-plugin-prettier": "^0.0.5" + } +} diff --git a/da-contracts/slither.config.json b/da-contracts/slither.config.json new file mode 100644 index 000000000..0db8465a6 --- /dev/null +++ b/da-contracts/slither.config.json @@ -0,0 +1,11 @@ +{ + "filter_paths": "(contracts/dev-contracts|lib|node_modules)", + "detectors_to_exclude": "assembly,solc-version,low-level-calls,conformance-to-solidity-naming-conventions,incorrect-equality,uninitialized-local", + "exclude_dependencies": true, + "compile_force_framework": "foundry", + "exclude_medium": false, + "exclude_high": false, + "exclude_low": true, + "exclude_informational": true, + "exclude_optimization": true +} diff --git a/da-contracts/tsconfig.json b/da-contracts/tsconfig.json new file mode 100644 index 000000000..41d8e3fa8 --- /dev/null +++ b/da-contracts/tsconfig.json @@ -0,0 +1,7 @@ +{ + "compilerOptions": { + "types": ["node", "mocha"], + "downlevelIteration": true, + "resolveJsonModule": true + } +} diff --git a/docs/Overview.md b/docs/Overview.md index 6af3d407d..bcee716b2 100644 --- a/docs/Overview.md +++ b/docs/Overview.md @@ -1,6 +1,6 @@ # Overview -zkSync Era is a permissionless general-purpose ZK rollup. Similar to many L1 blockchains and sidechains it enables +ZKsync Era is a permissionless general-purpose ZK rollup. Similar to many L1 blockchains and sidechains it enables deployment and interaction with Turing-complete smart contracts. - L2 smart contracts are executed on a zkEVM. @@ -10,7 +10,7 @@ deployment and interaction with Turing-complete smart contracts. - There is no escape hatch mechanism yet, but there will be one. All data that is needed to restore the L2 state are also pushed on-chain. There are two approaches, publishing inputs of -L2 transactions on-chain and publishing the state transition diff. zkSync follows the second option. 
+L2 transactions on-chain and publishing the state transition diff. ZKsync follows the second option. See the [documentation](https://era.zksync.io/docs/dev/fundamentals/rollups.html) to read more! @@ -25,13 +25,13 @@ See the [documentation](https://era.zksync.io/docs/dev/fundamentals/rollups.html L2 blocks. - **Facet** - implementation contract. The word comes from the EIP-2535. - **Gas** - a unit that measures the amount of computational effort required to execute specific operations on the - zkSync Era network. + ZKsync Era network. ### L1 Smart contracts #### Diamond -Technically, this L1 smart contract acts as a connector between Ethereum (L1) and zkSync (L2). This contract checks the +Technically, this L1 smart contract acts as a connector between Ethereum (L1) and ZKsync (L2). This contract checks the validity proof and data availability, handles L2 <-> L1 communication, finalizes L2 state transition, and more. There are also important contracts deployed on the L2 that can also execute logic called _system contracts_. Using L2 @@ -73,7 +73,7 @@ execution of upgrades in the diamond proxy. This contract manages operations (calls with preconditions) for governance tasks. The contract allows for operations to be scheduled, executed, and canceled with appropriate permissions and delays. It is used for managing and coordinating -upgrades and changes in all zkSync Era governed contracts. +upgrades and changes in all ZKsync Era governed contracts. Each upgrade consists of two steps: @@ -122,8 +122,8 @@ function applyL1ToL2Alias(address l1Address) internal pure returns (address l2Ad ``` For most of the rollups the address aliasing needs to prevent cross-chain exploits that would otherwise be possible if -we simply reused the same L1 addresses as the L2 sender. In zkSync Era address derivation rule is different from the -Ethereum, so cross-chain exploits are already impossible. 
However, zkSync Era may add full EVM support in the future, so +we simply reused the same L1 addresses as the L2 sender. In ZKsync Era address derivation rule is different from the +Ethereum, so cross-chain exploits are already impossible. However, ZKsync Era may add full EVM support in the future, so applying address aliasing leave room for future EVM compatibility. The L1 -> L2 communication is also used for bridging ether. The user should include a `msg.value` when initiating a @@ -157,7 +157,7 @@ this trick: #### L1 -> L2 Transaction filtering There is a mechanism for applying custom filters to the L1 -> L2 communication. It is achieved by having an address of -the `TransactionFilterer` contract in the `ZkSyncHyperchainStorage`. If the filterer exists, it is being called in +the `TransactionFilterer` contract in the `ZkSyncZKChainStorage`. If the filterer exists, it is being called in the `Mailbox` facet with the tx details and has to return whether the transaction can be executed or not. The filterer has to implement the `ITransactionFilterer` interface. The ones intended to use this feature, have to deploy the contract that implements `ITransactionFilterer` and use `setTransactionFilterer` function of `AdminFacet` to set the @@ -178,12 +178,12 @@ Each L2 -> L1 system log will have a key that is part of the following: ```solidity enum SystemLogKey { L2_TO_L1_LOGS_TREE_ROOT_KEY, - TOTAL_L2_TO_L1_PUBDATA_KEY, - STATE_DIFF_HASH_KEY, PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, PREV_BATCH_HASH_KEY, CHAINED_PRIORITY_TXN_HASH_KEY, NUMBER_OF_LAYER_1_TXS_KEY, + L2_DA_VALIDATOR_OUTPUT_HASH_KEY, + USED_L2_DA_VALIDATOR_ADDRESS_KEY, EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY } ``` @@ -223,7 +223,7 @@ fee-on-transfer tokens or other custom logic for handling user balances. Only wo - `claimFailedDeposit` - unlock funds if the deposit was initiated but then failed on L2. - `finalizeWithdrawal` - unlock funds for the valid withdrawal request from L2. 
-##### L1SharedBridge +##### L1AssetRouter The "standard" implementation of the ERC20 and WETH token bridge. Works only with regular ERC20 tokens, i.e. not with fee-on-transfer tokens or other custom logic for handling user balances. @@ -253,8 +253,8 @@ the L1 recipient. #### ValidatorTimelock -An intermediate smart contract between the validator EOA account and the zkSync smart contract. Its primary purpose is -to provide a trustless means of delaying batch execution without modifying the main zkSync contract. zkSync actively +An intermediate smart contract between the validator EOA account and the ZKsync smart contract. Its primary purpose is +to provide a trustless means of delaying batch execution without modifying the main ZKsync contract. ZKsync actively monitors the chain activity and reacts to any suspicious activity by freezing the chain. This allows time for investigation and mitigation before resuming normal operations. @@ -264,12 +264,12 @@ the Alpha stage. This contract consists of four main functions `commitBatches`, `proveBatches`, `executeBatches`, and `revertBatches`, that can be called only by the validator. -When the validator calls `commitBatches`, the same calldata will be propagated to the zkSync contract (`DiamondProxy` +When the validator calls `commitBatches`, the same calldata will be propagated to the ZKsync contract (`DiamondProxy` through `call` where it invokes the `ExecutorFacet` through `delegatecall`), and also a timestamp is assigned to these batches to track the time these batches are committed by the validator to enforce a delay between committing and execution of batches. Then, the validator can prove the already committed batches regardless of the mentioned timestamp, -and again the same calldata (related to the `proveBatches` function) will be propagated to the zkSync contract. 
After, -the `delay` is elapsed, the validator is allowed to call `executeBatches` to propagate the same calldata to zkSync +and again the same calldata (related to the `proveBatches` function) will be propagated to the ZKsync contract. After, +the `delay` is elapsed, the validator is allowed to call `executeBatches` to propagate the same calldata to ZKsync contract. ### L2 specifics diff --git a/docs/cab/SDK_compatibility.md b/docs/cab/SDK_compatibility.md new file mode 100644 index 000000000..c9f2ee10a --- /dev/null +++ b/docs/cab/SDK_compatibility.md @@ -0,0 +1,25 @@ +# SDK and contracts compatibility + +## Shared Bridge upgrade + +| | Old sdk | Bridgehub SDK | +| ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| deposit | L1ERC20 Bridge is called. This forwards the funds and the call to the depositLegacyErc20Bridge on the L1SharedBridge. We store depositAmount in ERC and depositHappened in SharedBridge. BH is called with requestL2TransactionDirect. | Bridgehub requestL2Transaction(Direct/TwoBridges) is called, this calls bridgehubDeposit, bridgehubDepositBaseToken. We store the txHash in depositHappened. | +| finalizeWithdrawal | L1ERC20 Bridge is called. We check isWithdrawalFinalized in L1ERC20Bridge. We forward to L1SharedBridge. We check isWithdrawalFinalized and for legacy Eth Era withdrawals the Mailbox as well. We store isWithdrawalFinalized in L1SharedBridge. | L1SharedBridge is called. We check for legacy token and Eth Era withdrawals that the txs has not been finalized on the L1ERC20 and the Mailbox. We store isWithdrawalFinalized. 
| +| claimFailedDeposit | L1ERC20 Bridge is called. We check and delete depositAmount. L1SharedBridge is called, with checkedInLegacyBridge flag = true. We check and delete depositHappened if we can, i.e. if the deposit is not a legacy deposit ( legacy deposits never touched the L1SharedBridge). | L1SharedBridge is called, we check depositHappened and delete it. | +| l2TokenAddress | L1ERC20 Bridge is called. | L1ERC20 Bridge is called. | +| L2 withdrawal | L1ERC20 Bridge is called | L1ERC20 Bridge is called | + +## Custom Asset Bridging upgrade + +We will finalize all legacy ( before the Bridgehub upgrade) withdrawals before the Custom Asset Bridging upgrade. This will cut complexity. + +Note: in the first version the finalizeDeposit call to the L2SharedBridge is not updated for NTV assets to keep the SDK backwards compatible. + +| | Old sdk | Bridgehub SDK | Custom Asset Bridging | +| ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| deposit | L1ERC20 Bridge is called. This forwards the funds and the call to the depositLegacyErc20Bridge on the L1SharedBridge. We store depositAmount in ERC and depositHappened in SharedBridge. Funds are further forwarded to NTV ( todo: do this directly from ERC20Bridge), and NTV is called to store funds. BH is called with requestL2TransactionDirect. 
| Bridgehub calls L1SharedBridge, AssetData encoding and token changed, L1SharedBridge can parse both | Bridgehub calls L1SharedBridge, AssetData encoding and token changed, L1SharedBridge can parse both | +| finalizeWithdrawal | L1ERC20 Bridge is called. We check isWithdrawalFinalized in L1ERC20Bridge. We forward to L1SharedBridge. We check the withdrawal is not legacy before the BH upgrade. We store isWithdrawalFinalized in L1SharedBridge. | L1SharedBridge is called. We change token -> assetId. We check the withdrawal is not before the BH upgrade. We store isWithdrawalFinalized in L1SharedBridge. Forward to NTV to bridgeMint. | L1SharedBridge is called. We check the withdrawal is not before the BH upgrade. We store isWithdrawalFinalized in L1SharedBridge. Forward to NTV to bridgeMint. | +| claimFailedDeposit | L1ERC20 Bridge is called | L1SharedBridge is called | L1SharedBridge is called | +| l2TokenAddress | L1ERC20 Bridge is called | L1ERC20 Bridge is called | L1ERC20 Bridge is called | +| L2 withdrawal | L1ERC20 Bridge is called | L1SharedBridge is called | L1SharedBridge is called | diff --git a/docs/cab/asset-registration.png b/docs/cab/asset-registration.png new file mode 100644 index 000000000..faec62f0d Binary files /dev/null and b/docs/cab/asset-registration.png differ diff --git a/docs/cab/automatic-bridging.png b/docs/cab/automatic-bridging.png new file mode 100644 index 000000000..c1d5d9195 Binary files /dev/null and b/docs/cab/automatic-bridging.png differ diff --git a/docs/cab/contracts-review-cab.md b/docs/cab/contracts-review-cab.md new file mode 100644 index 000000000..3fa46229c --- /dev/null +++ b/docs/cab/contracts-review-cab.md @@ -0,0 +1,78 @@ +# Custom Asset Bridging + +## High-level Overview + +### Reason for changes + +The goal was to be build a modular bridge which separates the logic of L1<>L2 messaging from the holding of the asset. 
This enables bridging many custom tokens, assets which accrue value over time (like LRTs), WETH, and even custom assets like NFTs. + +This upgrade only contains the framework, the logic of the custom bridges can be developed by third parties. + +### Major changes + +In order to achieve it, we separated the liquidity managing logic from the Shared Bridge to `Asset Handlers`. The basic cases will be handled by `Native Token Vaults`, which are handling all of the standard `ERC20 tokens`, as well as `ETH`. + +### New concepts + +- AssetDeploymentTracker => contract that manages the deployment of asset handlers across chains. It is the contract that registers these asset handlers in the AssetRouters. +- AssetHandler => contract that manages liquidity (burns/mints, locks/unlocks) for specific token (or a set of them) +- assetId => identifier to track bridged assets across chains linked to specific asset handler. + +### Normal flow + +Assets Handlers are registered in the Routers based on their assetId. The assetId is used to identify the asset when bridging, it is sent with the cross-chain transaction data and Router routes the data to the appropriate Handler. If the asset handler is not registered in the L2 Router, then the L1->L2 bridging transaction will fail on the L2 (expect for NTV assets, see below). + +`assetId = keccak256(chainId, asset deployment tracker = msg.sender, additionalData)` + +Asset registration is handled by the AssetDeploymentTracker. It is expected that this contract is deployed on the L1. Registration can be permissionless depending on the Asset (e.g. the AssetHandler can be deployed on the chain at a predefined address, this can message the L1 ADT, which can then register the asset in the Router). Registering the L1 Handler in the L1 Router can be done via a direct function call from the L1 Deployment Tracker. Registration in the L2 Router is done indirectly via the L1 Router. 
+ +![Asset Registration](./asset-registration.png) + +The Native Token Vault is a special case of the Asset Handler, as we want it to support automatic bridging. This means it should be possible to bridge a L1 token to an L2 without deploying the Token contract beforehand and without registering it in the L2 Router. For NTV assets, L1->L2 transactions where the AssetHandler is not registered will not fail, but the message will be automatically be forwarded to the L2NTV. Here the contract checks that the asset is indeed deployed by the L1NTV, by checking that the assetId contains the correct ADT address (note, for NTV assets the ADT is the NTV and the used address is the L2NTV address). If the assetId is correct, the token contract is deployed. + +![Automatic Bridge](./automatic-bridging.png) + +## Known Issues + +### storage layout + +L2SharedBridge will be a system contract, L2NativeTokenVault will replace it (the storage layout is still not yet backwards compatible) + +### bridgehubDeposit API change + +> /// @notice Allows bridgehub to acquire mintValue for L1->L2 transactions. + + /// @dev If the corresponding L2 transaction fails, refunds are issued to a refund recipient on L2. + function bridgehubDepositBaseToken( + uint256 _chainId, + bytes32 _assetId, + +Note, that the new SB is not compatible with both: + +- old Mailbox on Era +- old Bridgehub +- And vice versa. + +We need to either: + +- ensure that all 3 get upgraded at the same time. Upgrading BH and SB at the same time is feasible IMHO. But upgrading DP (Mailbox) in the same transaction may not be. +- have a concrete plan for such case. E.g. explicitly tell that the legacy deposits will stop working. 
In this case we need to check (at the very least visually) that the worst thing that can happen is just deposits not working and not some funds lost +- Add the corresponding legacy functions + +### Not allowing legacy withdrawals + +> require(!\_isEraLegacyEthWithdrawal(\_chainId, \_l2BatchNumber), "ShB: legacy eth withdrawal"); + +No method to finalize an old withdrawal. +We will manually finalize all legacy withdrawals before the upgrade, i.e. withdrawals that happened before the previous Bridgehub upgrade. + +### Custom Errors not implemented + +> require(expectedDepositAmount == \_depositAmount, "3T"); // The token has non-standard transfer logic + +Custom errors will be introduced for all contracts. + +## Migration plan + +- Bulkheads will need to be migrated (methods added) +- Tokens will have to be transferred (methods added) diff --git a/docs/gateway/Custom-da-contracts.png b/docs/gateway/Custom-da-contracts.png new file mode 100644 index 000000000..b97d9755d Binary files /dev/null and b/docs/gateway/Custom-da-contracts.png differ diff --git a/docs/gateway/Custom-da-external.png b/docs/gateway/Custom-da-external.png new file mode 100644 index 000000000..0c318dd53 Binary files /dev/null and b/docs/gateway/Custom-da-external.png differ diff --git a/docs/gateway/Hyperchain-scheme.png b/docs/gateway/Hyperchain-scheme.png new file mode 100644 index 000000000..b5ec44b01 Binary files /dev/null and b/docs/gateway/Hyperchain-scheme.png differ diff --git a/docs/gateway/L1-GM-Chain.png b/docs/gateway/L1-GM-Chain.png new file mode 100644 index 000000000..84b0bd0f3 Binary files /dev/null and b/docs/gateway/L1-GM-Chain.png differ diff --git a/docs/gateway/L1-L2.png b/docs/gateway/L1-L2.png new file mode 100644 index 000000000..41ac428f7 Binary files /dev/null and b/docs/gateway/L1-L2.png differ diff --git a/docs/gateway/MessageRoot.png b/docs/gateway/MessageRoot.png new file mode 100644 index 000000000..c85845396 Binary files /dev/null and b/docs/gateway/MessageRoot.png 
differ diff --git a/docs/gateway/PQ1.png b/docs/gateway/PQ1.png new file mode 100644 index 000000000..0f3602371 Binary files /dev/null and b/docs/gateway/PQ1.png differ diff --git a/docs/gateway/PQ2.png b/docs/gateway/PQ2.png new file mode 100644 index 000000000..92a3e3002 Binary files /dev/null and b/docs/gateway/PQ2.png differ diff --git a/docs/gateway/PQ3.png b/docs/gateway/PQ3.png new file mode 100644 index 000000000..8cd5fd847 Binary files /dev/null and b/docs/gateway/PQ3.png differ diff --git a/docs/gateway/chain-asset-id-registration.png b/docs/gateway/chain-asset-id-registration.png new file mode 100644 index 000000000..98dff79b2 Binary files /dev/null and b/docs/gateway/chain-asset-id-registration.png differ diff --git a/docs/gateway/chain-migration.md b/docs/gateway/chain-migration.md new file mode 100644 index 000000000..c638cc5c9 --- /dev/null +++ b/docs/gateway/chain-migration.md @@ -0,0 +1,13 @@ +# Chain migration + +Chain migration uses the Custom Asset Bridging framework: + +- CTMs can be deployed on the Gateway. Each CTM has its own assetId. +- The CTM Deployment Tracker deployed on L1 registers assetId in the L1 and L2 AssetRouters, with the Bridgehub as the AssetHandler. It also registers the L1 and L2 CTM contracts to be associated to the assetId in the Bridgehubs. +- Bridging of a chain happens via the Bridgehub, AssetRouters, and CTM. 
+ +![CTM assetId registration](./chain-asset-id-registration.png) +_Note these are separate calls_ + +![Chain migration](./chain-migration.png) +_Note these are a single call with an L1->L2 txs_ diff --git a/docs/gateway/chain-migration.png b/docs/gateway/chain-migration.png new file mode 100644 index 000000000..668d9bfa7 Binary files /dev/null and b/docs/gateway/chain-migration.png differ diff --git a/docs/gateway/contracts-review-gateway.md b/docs/gateway/contracts-review-gateway.md new file mode 100644 index 000000000..5e14df72e --- /dev/null +++ b/docs/gateway/contracts-review-gateway.md @@ -0,0 +1,68 @@ +# Contracts review - Gateway + +## Intro + +The ZK Gateway is a proof composition layer that will allow chains to settle seamlessly to Ethereum while doing interop (interop not yet supported). In this first version the Gateway is an instance of the EraVM. + +List of changes and new features: + +- Priority Queue uses a Merkle tree structure to store the transactions. This is needed for efficient migration to and from the Gateway. The merkle tree is a DynamicIncrementalMerkleTree, for more information see the PriorityQueue doc. +- Custom Data Availability contracts. This is needed to handle the relayed data availability on the Gateway. +- L1 -> Gateway -> ZKChain transactions. This is done by forwarding transactions to Chain's Mailbox on the Gateway via the Gateway's Mailbox. +- ZKChain -> Gateway -> L1 transactions. This is done by aggregating the logs of different chains in the MessageRoot contract, and sending a single log to L1. +- Migration of chains to and from the Gateway. This is done using our Custom Asset Bridging framework, each CTM has an assetId and is managed by a shared CTMDeploymentTracker, the L2AssetRouter = L2SharedBridge is deployed on the Gateway, but only holds the chains as assets, with the Bridgehub as the AssetHandler. 
+ +Other smaller changes: + +- The setChainId upgrade is updated to become the Genesis upgrade with an L2 contract (deployed at genesis in user-space). +- The Bridgehub, MessageRoot, L2SharedBridge and L2NativeTokenVault contracts are now deployed at genesis on the L2 at fixed addresses in user space. +- The SharedBridges are renamed to AssetRouters. +- Merkle.sol was moved from state-transition/libraries to common/libraries. + +Known issues, and features that still need to be implemented: + +- ZKChain -> Gateway -> L1 transactions are not yet supported. +- Chains cannot yet leave the Gateway. Failed migration to the Gateway cannot yet be reclaimed. +- Upgrade process, how do we upgrade to CAB bridge, to the new system contracts. +- We had the syncLayer internal name previously for the Gateway. This has not been replaced everywhere yet. +- permissions for some functions are not properly restricted yet, mostly they are missing a modifier. +- Bridgehub setAssetHandlerAddress `address sender` might be an issue. +- MessageRoot should be renamed to MessageRootAggregator + +![Untitled](./ZKChain-scheme.png) + +## Initial Scope + +- l1-contracts/contracts/ + - FullMerkle.sol + - DynamicIncrementalMerkle.sol + - Merkle.sol + - PriorityTree.sol + - CalldataDA.sol + - ReleyedSLDAValidator.sol + - Mailbox/\_writePriorityOp function (not other functions) + - Executor.sol +- da-contracts/contracts/\* +- system-contracts/contracts/ + - PubdataChunkPublisher.sol + - L1Messenger.sol + - Compressor.sol +- l2-contracts/contracts/ + - data-availability/\* + - L2ContractHelper.sol + +## Later scope + +The majority of the rest of the changes. This makes the scope quite big, so please focus on the initial scope in more detail, and if you have time include the later scope. 
+ +- MessageRoot.sol +- CTMDeploymentTracker.sol +- Bridgehub.sol +- Config.sol +- L2ContractAddresses.sol +- ChainTypeManager.sol +- ValidatorTimelock.sol +- DiamondInit.sol +- ZKChainStorage.sol +- Admin.sol +- L1GenesisUpgrade.sol diff --git a/docs/gateway/custom-da.md b/docs/gateway/custom-da.md new file mode 100644 index 000000000..acb719835 --- /dev/null +++ b/docs/gateway/custom-da.md @@ -0,0 +1,47 @@ +# Custom DA support + +## Intro + +We introduced modularity into our contracts to support multiple DA layers, easier support for Validium and Rollup mode, and to settlement via the Gateway. + +![The contracts for the rollup case](./Custom-da-contracts.png) +![The general architecture](./Custom-da-external.png) + +### Background + +**Pubdata** - information published by the ZK Chain that can be used to reconstruct its state, it consists of l2→l1 logs, l2→l1 messages, contract bytecodes, and compressed state diffs. + +```solidity +struct PubdataInput { + pub(crate) user_logs: Vec, + pub(crate) l2_to_l1_messages: Vec>, + pub(crate) published_bytecodes: Vec>, + pub(crate) state_diffs: Vec, +} +``` + +The current version of ZK Chains supports the following DataAvailability(DA) modes: + +- `Calldata` - uses Ethereum tx calldata as pubdata storage +- `Blobs` - uses Ethereum blobs calldata as pubdata storage +- `No DA Validium` - posting pubdata is not enforced + +The goal is to create a general purpose solution, that would ensure DA consistency and verifiability, on top of which we would build what is requested by many partners and covers many use cases like on-chain games and DEXes: **Validium with Abstract DA.** + +This means that a separate solution like AvailDA, EigenDA, Celestia, etc. would be used to store the pubdata. The idea is that every solution like that (`DA layer`) provides a proof of inclusion of our pubdata to their storage, and this proof can later be verified on Ethereum. 
This results in an approach that has more security guarantees than `No DA Validium`, but lower fees than `Blobs` (assuming that Ethereum usage grows and blobs become more expensive). + +## Proposed solution + +The proposed solution is to introduce an abstract 3rd party DA layer, that the sequencer would publish the data to. When the batch is sealed, the hashes of the data related to that batch will be made available on L1. Then, after the DA layer has confirmed that its state is synchronized, the sequencer calls a `commitBatches` function with the proofs required to verify the DA inclusion on L1. + +### Challenges + +On the protocol level, the complexity is in introducing two new components: L1 and L2 DA verifiers. They are required to ensure the verifiable delivery of the DA inclusion proofs to L1 and consequent verification of these proofs. + +The L2 verifier would validate the pubdata correctness and compute a final commitment for DA called `outputHash`. It consists of hashes of `L2→L1 logs and messages`, `bytecodes`, and `compressed state diffs` (blob hashes in case of blobs). This contract has to be deployed by the chain operator and it has to be tied to the DA layer logic, e.g. DA layer accepts 256kb blobs → on the final hash computation stage, the pubdata has to be packed into the chunks of <256kb, and either the hashes of all blobs, or a rolling hash has to be part of the `outputHash` preimage. + +The `outputHash` will be sent to L1 as an L2→L1 log, so this process is a part of a bootloader execution and can be trusted. + +The hashes of data chunks alongside the inclusion proofs have to be provided in the calldata of the L1 diamond proxy’s `commitBatches` function. + +L1 contracts have to recalculate the `outputHash` and make sure it matches the one from the logs, after which the abstract DA verification contract is called. 
In general terms, it would accept the set of chunk’s hashes (by chunk here I mean DA blob, not to be confused with 4844 blob) and a set of inclusion proofs, that should be enough to verify that the preimage (chunk data) is included in the DA layer. This verification would be done by a specific contract, e.g. `Attestation Bridge`, which holds the state tree information and can perform verification against it. diff --git a/docs/gateway/messaging-via-gateway.md b/docs/gateway/messaging-via-gateway.md new file mode 100644 index 000000000..bfffdb429 --- /dev/null +++ b/docs/gateway/messaging-via-gateway.md @@ -0,0 +1,23 @@ +# Messaging via Gateway + +Messaging for chains using the Gateway has two components, receiving messages from L1 and sending messages to it. + +## L1 -> Gateway -> ZK Chain transactions + +Transactions are sent from the Chain's Mailbox (on L1) to the Gateway's L1 Mailbox. From here they are routed to the BH on the Gateway, which forwards them to the Chain's Mailbox on the Gateway. + +![Direct L1->L2 messaging](./L1-L2.png) + +![L1->L2 messaging via the Gateway](./L1->GM->Chain.png) + +## ZK Chain -> Gateway -> L1 transactions + +Messages sent from the ZK Chain to L1 are aggregated in the MessageRoot contract on the Gateway. Logs are first aggregated for a single chain across batches in a DynamicIncrementalMerkleTree, and then the roots of chains are aggregated in a FullMerkle Tree. + +![MessageRoot aggregation on the Gateway](./MessageRoot.png) + +The benefits of this architecture are: + +- the DI Merkle Tree allows chains to send multiple batches after each other, while not "burying" older batches (the Merkle paths to them will not be long unlike in a rolling hash solution). +- the Full Merkle Tree allows each chain to update the tree individually, they only need to recalculate a single branch. Storage is cheaper on the Gateway than on L1. 
+- Rollups publish all their L2->L1 transaction data, but for Validiums chains need to have access to the chain-level message roots to be able to construct merkle paths to every message. The Full Merkle provides this. diff --git a/docs/gateway/priority-merkle-tree.md b/docs/gateway/priority-merkle-tree.md new file mode 100644 index 000000000..ffd52237d --- /dev/null +++ b/docs/gateway/priority-merkle-tree.md @@ -0,0 +1,135 @@ +# Migrating Priority Queue to Merkle Tree + +## Overview of the current implementation + +Priority queue is a data structure in Era contracts that is used to handle L1->L2 priority operations. It supports the following: + +- inserting a new operation into the end of the queue +- checking that a newly executed batch executed some n first priority operations from the queue (and not some other ones) in correct order + +The queue itself only stores the following: + +```solidity +struct PriorityOperation { + bytes32 canonicalTxHash; + uint64 expirationTimestamp; + uint192 layer2Tip; +} +``` + +of which we only care about the canonical hash. + +### Inserting new operations + +The queue is implemented as a [library](https://github.com/matter-labs/era-contracts/blob/f3630fcb01ad8b6e2e423a6f313abefe8502c3a2/l1-contracts/contracts/zksync/libraries/PriorityQueue.sol#L20). +For each incoming priority operation, we simply `pushBack` its hash, expiration and layer2Tip. + +### Checking validity + +When a new batch is executed, we need to check that operations that were executed there match the operations in the priority queue. The batch header contains `numberOfLayer1Txs` and `priorityOperationsHash` which is a rolling hash of all priority operations that were executed in the batch. The bootloader checks that this hash indeed corresponds to all priority operations that have been executed in that batch. 
The contract only checks that this hash matches the operations stored in the queue: + +```solidity +/// @dev Pops the priority operations from the priority queue and returns a rolling hash of operations +function _collectOperationsFromPriorityQueue(uint256 _nPriorityOps) internal returns (bytes32 concatHash) { + concatHash = EMPTY_STRING_KECCAK; + + for (uint256 i = 0; i < _nPriorityOps; i = i.uncheckedInc()) { + PriorityOperation memory priorityOp = s.priorityQueue.popFront(); + concatHash = keccak256(abi.encode(concatHash, priorityOp.canonicalTxHash)); + } +} + +bytes32 priorityOperationsHash = _collectOperationsFromPriorityQueue(_storedBatch.numberOfLayer1Txs); +require(priorityOperationsHash == _storedBatch.priorityOperationsHash); // priority operations hash does not match to expected +``` + +As can be seen, this is done in `O(n)` compute, where `n` is the number of priority operations in the batch. + +## Motivation for migration to Merkle Tree + +Since we will be introducing Sync Layer, we will need to support one more operation: + +- migrating priority queue from L1 to SL (and back) + +Current implementation takes `O(n)` space and is vulnerable to spam attacks during migration +(e.g. an attacker can insert a lot of priority operations and we won't be able to migrate all of them due to gas limits). + +Hence, we need an implementation with a small (constant- or log-size) space imprint that we can migrate to SL and back that would still allow us to perform the other 2 operations. + +Merkle tree of priority operations is perfect for this since we can simply migrate the latest root hash to SL and back. + +- It can still efficiently (in `O(height)`) insert new operations. +- It can also still efficiently (in `O(n)` compute and `O(n + height)` calldata) check that the batch’s `priorityOperationsHash` corresponds to the operations from the queue. + +Note that `n` here is the number of priority operations in the batch, not `2^height`. 
+ +The implementation details are described below. + +### FAQ + +- Q: Why can't we just migrate the rolling hash of the operations in the existing priority queue? +- A: The rolling hash is not enough to check that the operations from the executed batch are indeed from the priority queue. We would need to store all historical rolling hashes, which would be `O(n)` space and would not solve the spam attack problem. + +## Implementation + +The implementation will consist of two parts: + +- Merkle tree on L1 contracts, to replace the existing priority queue (while still supporting the existing operations) +- Merkle tree off-chain on the server, to generate the merkle proofs for the executed priority operations. + +### Contracts + +On the contracts, the Merkle tree will be implemented as an Incremental (append-only) Merkle Tree ([example implementation](https://github.com/tornadocash/tornado-core/blob/master/contracts/MerkleTreeWithHistory.sol)), meaning that it can efficiently (in `O(height)` compute) append new elements to the right, while only storing `O(height)` nodes at all times. + +It will also be dynamically sized, meaning that it will double in size when the current size is not enough to store the new element. + +### Server + +On the server, the Merkle tree will be implemented as an extension of `MiniMerkleTree` currently used for L2->L1 logs. + +It will have the following properties: + +- in-memory: the tree will be stored in memory and will be rebuilt on each restart (details below). +- dynamically sized (to match the contracts implementation) +- append-only (to match the contracts implementation) + +The tree does not need to be super efficient, since we process on average 7 operations per batch. + +### Why in-memory? + +Having the tree in-memory means rebuilding the tree on each restart. This is fine because on mainnet after >1 year since release we have only 3.2M priority operations. 
We only have to fully rebuild the tree _once_ and then simply cache the already executed operations (which are the majority). Having the tree in-memory has an added benefit of not having to have additional infrastructure to store it on disk and not having to be bothered to rollback its state manually if we ever have to (as we do for e.g. for the storage logs tree). + +Note: If even rebuilding it once becomes a problem, it can be easily mitigated by only persisting the cache nodes. + +### Caching + +**Why do we need caching?** After a batch is successfully executed, we will no longer need to have the ability to generate merkle paths for those operations. This means that we can save space and compute by only fully storing the operations that are not yet executed, and caching the leaves +corresponding to the already executed operations. + +We will only cache some prefix of the tree, meaning nodes in the interval [0; N) where N is the number of executed priority operations. The cache will store the rightmost cached left-child node on each level of the tree (see diagrams). + +![Untitled](./PQ1.png) + +![Untitled](./PQ2.png) + +![Untitled](./PQ3.png) + +This means that we will not be able to generate merkle proofs for the cached nodes (and since they are already executed, we don't need to). This structure allows us to save a lot of space, since it only takes up `O(height)` space instead of linear space for all executed operations. This is a big optimization since there are currently 3.2M total operations but <10 non-executed operations in the mainnet priority queue, which means most of the tree will be cached. + +This also means we don’t really have to store non-leaf nodes other than cache, since we can calculate merkle root / merkle paths in `O(n)` where `n` is the number of non-executed operations (and not total number of operations), and since `n` is so small, it is really fast. 
+ +### Adding new operations + +On the contracts, appending a new operation to the tree is done by simply calling `append` on the Incremental Merkle Tree, which will update at most `height` slots. Actually, it works almost exactly like the cache described above. Once again: [tornado-cash implementation](https://github.com/tornadocash/tornado-core/blob/1ef6a263ac6a0e476d063fcb269a9df65a1bd56a/contracts/MerkleTreeWithHistory.sol#L68). + +On the server, `eth_watch` will listen for `NewPriorityOperation` events as it does now, and will append the new operation to the tree on the server. + +### Checking validity + +To check that the executed batch indeed took its priority operations from the queue, we have to make sure that if we take first `numberOfL1Txs` non-executed operations from the tree, their rolling hash will match `priorityOperationsHash`. Since we will not be storing the hashes of these operations onchain anymore, we will have to provide them as calldata. Additionally in calldata, we should provide merkle proofs for the **first and last** operations in that batch (hence `O(n + height)` calldata). This will make it possible to prove onchain that that contiguous interval of hashes indeed exists in the merkle tree. + +This can be done simply by constructing the part of the tree above this interval using the provided paths to first and last elements of the interval and checking that the computed merkle root matches the stored one (in `O(n)` where `n` is number of priority operations in a batch). We will also need to track the `index` of the first unexecuted operation onchain to properly calculate the merkle root and ensure that batches don’t execute some operations out of order or multiple times. 
+ +We will also need to prove that the rolling hash of provided hashes matches with `priorityOperationsHash` which is also `O(n)` + +It is important to note that we should store some number of historical root hashes, since the Merkle tree on the server might lag behind the contracts a bit, and hence merkle paths generated on the server-side might become invalid if we compare them to the latest root hash on the contracts. These historical root hashes are not necessary to migrate to and from SL though. diff --git a/gas-bound-caller/README.md b/gas-bound-caller/README.md index 00f2868df..17b647539 100644 --- a/gas-bound-caller/README.md +++ b/gas-bound-caller/README.md @@ -46,4 +46,4 @@ Since `GasBoundCaller` would be the contract that calls the `_to` contract, the It should be deployed via a built-in CREATE2 factory on each individual chain. -The current address on both sepolia testnet and mainnet for zkSync Era is `0xc706EC7dfA5D4Dc87f29f859094165E8290530f5`. +The current address on both sepolia testnet and mainnet for ZKsync Era is `0xc706EC7dfA5D4Dc87f29f859094165E8290530f5`. 
diff --git a/gas-bound-caller/contracts/test-contracts/GasBoundCallerTester.sol b/gas-bound-caller/contracts/test-contracts/GasBoundCallerTester.sol index 8c1b790d6..1314bf0c0 100644 --- a/gas-bound-caller/contracts/test-contracts/GasBoundCallerTester.sol +++ b/gas-bound-caller/contracts/test-contracts/GasBoundCallerTester.sol @@ -57,9 +57,9 @@ contract GasBoundCallerTester is GasBoundCaller { } } - function testReturndataOverhead(uint256 len) external { + function testReturndataOverhead(uint256 _len, uint256 _gasForInner) external { uint256 gasbefore = gasleft(); - this.testReturndataOverheadInner(false, len); + this.testReturndataOverheadInner{gas: _gasForInner}(false, _len); lastRecordedGasLeft = gasbefore - gasleft(); } diff --git a/gas-bound-caller/contracts/test-contracts/SystemContractsCaller.sol b/gas-bound-caller/contracts/test-contracts/SystemContractsCaller.sol index ca7c870c7..1f154e270 100644 --- a/gas-bound-caller/contracts/test-contracts/SystemContractsCaller.sol +++ b/gas-bound-caller/contracts/test-contracts/SystemContractsCaller.sol @@ -6,7 +6,7 @@ import {MSG_VALUE_SYSTEM_CONTRACT, MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT} from "@mat import {Utils} from "@matterlabs/zksync-contracts/l2/system-contracts/libraries/Utils.sol"; // Addresses used for the compiler to be replaced with the -// zkSync-specific opcodes during the compilation. +// ZKsync-specific opcodes during the compilation. // IMPORTANT: these are just compile-time constants and are used // only if used in-place by Yul optimizer. 
address constant TO_L1_CALL_ADDRESS = address((1 << 16) - 1); diff --git a/gas-bound-caller/hardhat.config.ts b/gas-bound-caller/hardhat.config.ts index d46561e37..56c38a783 100644 --- a/gas-bound-caller/hardhat.config.ts +++ b/gas-bound-caller/hardhat.config.ts @@ -5,11 +5,28 @@ import "@matterlabs/hardhat-zksync-verify"; import "@nomiclabs/hardhat-ethers"; import "hardhat-typechain"; +// This version of system contracts requires a pre release of the compiler +const COMPILER_VERSION = "1.5.0"; +const PRE_RELEASE_VERSION = "prerelease-a167aa3-code4rena"; +function getZksolcUrl(): string { + // @ts-ignore + const platform = { darwin: "macosx", linux: "linux", win32: "windows" }[process.platform]; + // @ts-ignore + const toolchain = { linux: "-musl", win32: "-gnu", darwin: "" }[process.platform]; + const arch = process.arch === "x64" ? "amd64" : process.arch; + const ext = process.platform === "win32" ? ".exe" : ""; + + return `https://github.com/matter-labs/era-compiler-solidity/releases/download/${PRE_RELEASE_VERSION}/zksolc-${platform}-${arch}${toolchain}-v${COMPILER_VERSION}${ext}`; +} + +console.log(`Using zksolc from ${getZksolcUrl()}`); + export default { zksolc: { version: "1.5.0", compilerSource: "binary", settings: { + compilerPath: getZksolcUrl(), isSystem: true, }, }, diff --git a/gas-bound-caller/package.json b/gas-bound-caller/package.json index db6b3d8bf..1b144a8ff 100644 --- a/gas-bound-caller/package.json +++ b/gas-bound-caller/package.json @@ -13,12 +13,12 @@ "eslint-plugin-prettier": "^5.0.1", "ethers": "^5.7.0", "fast-glob": "^3.3.2", - "hardhat": "^2.18.3", + "hardhat": "=2.22.2", "preprocess": "^3.2.0", "zksync-ethers": "^5.9.0" }, "devDependencies": { - "@matterlabs/hardhat-zksync-chai-matchers": "^0.1.4", + "@matterlabs/hardhat-zksync-chai-matchers": "^0.2.0", "@matterlabs/hardhat-zksync-node": "^0.0.1-beta.7", "@matterlabs/hardhat-zksync-verify": "0.6.1", "@nomicfoundation/hardhat-chai-matchers": "^1.0.3", @@ -57,7 +57,7 @@ "test-node": 
"hardhat node-zksync --tag v0.0.1-vm1.5.0", "check-canonical-bytecode": "ts-node ./scripts/check-canonical-bytecode.ts", "verify": "hardhat run scripts/verify.ts", - "deploy-on-hyperchain": "ts-node ./scripts/deploy-on-hyperchain.ts", + "deploy-on-zk-chain": "ts-node ./scripts/deploy-on-zk-chain.ts", "deploy-on-localhost": "hardhat deploy --network localhost" } } diff --git a/gas-bound-caller/scripts/deploy-on-hyperchain.ts b/gas-bound-caller/scripts/deploy-on-hyperchain.ts index 35d013fd7..228524de4 100644 --- a/gas-bound-caller/scripts/deploy-on-hyperchain.ts +++ b/gas-bound-caller/scripts/deploy-on-hyperchain.ts @@ -44,8 +44,8 @@ async function main() { program .version("0.1.0") - .name("Deploy on hyperchain") - .description("Deploys the GasBoundCaller on a predetermined Hyperchain network") + .name("Deploy on ZK chain") + .description("Deploys the GasBoundCaller on a predetermined ZK chain network") .option("--private-key ") .option("--l2Rpc ") .action(async (cmd) => { diff --git a/gas-bound-caller/test/GasBoundCaller.spec.ts b/gas-bound-caller/test/GasBoundCaller.spec.ts index 1a970f0fc..5b92e298f 100644 --- a/gas-bound-caller/test/GasBoundCaller.spec.ts +++ b/gas-bound-caller/test/GasBoundCaller.spec.ts @@ -39,15 +39,16 @@ describe("GasBoundCaller tests", function () { }); it("test returndata overhead", async () => { + // The tests' behavior depends on the amount of gas provided to its inner part, so we always provide 40kk await ( - await tester.testReturndataOverhead(10, { + await tester.testReturndataOverhead(10, 40_000_000, { gasLimit: 80_000_000, }) ).wait(); const smallBytecodeGas = await tester.lastRecordedGasLeft(); await ( - await tester.testReturndataOverhead(100000, { + await tester.testReturndataOverhead(100000, 40_000_000, { gasLimit: 80_000_000, }) ).wait(); diff --git a/high-level-design-bridging.png b/high-level-design-bridging.png new file mode 100644 index 000000000..a8f8d8f49 Binary files /dev/null and b/high-level-design-bridging.png differ 
diff --git a/l1-contracts/.env b/l1-contracts/.env index 10bbdf102..0cbe2dbd1 100644 --- a/l1-contracts/.env +++ b/l1-contracts/.env @@ -24,8 +24,12 @@ CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR=0x000000000000000000000000000000000000000 CONTRACTS_GOVERNANCE_ADDR=0x0000000000000000000000000000000000000000 CONTRACTS_L1_ERC20_BRIDGE_IMPL_ADDR=0x0000000000000000000000000000000000000000 CONTRACTS_L1_ERC20_BRIDGE_PROXY_ADDR=0x0000000000000000000000000000000000000000 +CONTRACTS_L1_NULLIFIER_IMPL_ADDR=0x0000000000000000000000000000000000000000 +CONTRACTS_L1_NULLIFIER_PROXY_ADDR=0x0000000000000000000000000000000000000000 CONTRACTS_L1_SHARED_BRIDGE_IMPL_ADDR=0x0000000000000000000000000000000000000000 CONTRACTS_L1_SHARED_BRIDGE_PROXY_ADDR=0x0000000000000000000000000000000000000000 +CONTRACTS_L1_BRIDGED_STANDARD_ERC20_IMPL_ADDR=0x0000000000000000000000000000000000000000 +CONTRACTS_L1_BRIDGED_TOKEN_BEACON_ADDR=0x0000000000000000000000000000000000000000 CONTRACTS_L1_ALLOW_LIST_ADDR=0x0000000000000000000000000000000000000000 CONTRACTS_CREATE2_FACTORY_ADDR=0x0000000000000000000000000000000000000000 CONTRACTS_VALIDATOR_TIMELOCK_ADDR=0x0000000000000000000000000000000000000000 @@ -33,4 +37,11 @@ CONTRACTS_VALIDATOR_TIMELOCK_EXECUTION_DELAY=0 ETH_SENDER_SENDER_OPERATOR_COMMIT_ETH_ADDR=0x0000000000000000000000000000000000000000 ETH_SENDER_SENDER_OPERATOR_BLOBS_ETH_ADDR=0x0000000000000000000000000000000000000001 CONTRACTS_SHARED_BRIDGE_UPGRADE_STORAGE_SWITCH=0 -CONTRACTS_MAX_NUMBER_OF_HYPERCHAINS=100 \ No newline at end of file +CONTRACTS_MAX_NUMBER_OF_ZK_CHAINS=100 +L1_CONFIG=/script-config/config-deploy-l1.toml +L1_OUTPUT=/script-out/output-deploy-l1.toml +TOKENS_CONFIG=/script-config/config-deploy-erc20.toml +ZK_CHAIN_CONFIG=/script-config/register-zk-chain.toml +ZK_CHAIN_OUTPUT=/script-out/output-deploy-zk-chain-era.toml +FORCE_DEPLOYMENTS_CONFIG=/script-config/generate-force-deployments-data.toml +GATEWAY_PREPARATION_L1_CONFIG=/script-config/gateway-preparation-l1.toml diff --git 
a/l1-contracts/README.md b/l1-contracts/README.md index 30ffc8399..8fb04bb86 100644 --- a/l1-contracts/README.md +++ b/l1-contracts/README.md @@ -1,10 +1,10 @@ -# zkSync Era: L1 Contracts +# ZKsync Era: L1 Contracts [![Logo](../eraLogo.svg)](https://zksync.io/) -zkSync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or +ZKsync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or decentralization. Since it's EVM compatible (Solidity/Vyper), 99% of Ethereum projects can redeploy without refactoring -or re-auditing a single line of code. zkSync Era also uses an LLVM-based compiler that will eventually let developers +or re-auditing a single line of code. ZKsync Era also uses an LLVM-based compiler that will eventually let developers write smart contracts in C++, Rust and other popular languages. ## L1 Contracts diff --git a/l1-contracts/contracts/bridge/BridgeHelper.sol b/l1-contracts/contracts/bridge/BridgeHelper.sol new file mode 100644 index 000000000..bcc59327f --- /dev/null +++ b/l1-contracts/contracts/bridge/BridgeHelper.sol @@ -0,0 +1,36 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-custom-errors + +import {IERC20Metadata} from "@openzeppelin/contracts-v4/token/ERC20/extensions/IERC20Metadata.sol"; +import {ETH_TOKEN_ADDRESS} from "../common/Config.sol"; +import {DataEncoding} from "../common/libraries/DataEncoding.sol"; + +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice Helper library for working with L2 contracts on L1. 
+ */ +library BridgeHelper { + /// @dev Receives and parses (name, symbol, decimals) from the token contract + function getERC20Getters(address _token, uint256 _originChainId) internal view returns (bytes memory) { + bytes memory name; + bytes memory symbol; + bytes memory decimals; + if (_token == ETH_TOKEN_ADDRESS) { + // when depositing eth to a non-eth based chain it is an ERC20 + name = abi.encode("Ether"); + symbol = abi.encode("ETH"); + decimals = abi.encode(uint8(18)); + } else { + /// note this also works on the L2 for the base token. + (, name) = _token.staticcall(abi.encodeCall(IERC20Metadata.name, ())); + (, symbol) = _token.staticcall(abi.encodeCall(IERC20Metadata.symbol, ())); + (, decimals) = _token.staticcall(abi.encodeCall(IERC20Metadata.decimals, ())); + } + return + DataEncoding.encodeTokenData({_chainId: _originChainId, _name: name, _symbol: symbol, _decimals: decimals}); + } +} diff --git a/l2-contracts/contracts/bridge/L2StandardERC20.sol b/l1-contracts/contracts/bridge/BridgedStandardERC20.sol similarity index 64% rename from l2-contracts/contracts/bridge/L2StandardERC20.sol rename to l1-contracts/contracts/bridge/BridgedStandardERC20.sol index d72608368..d848dcbac 100644 --- a/l2-contracts/contracts/bridge/L2StandardERC20.sol +++ b/l1-contracts/contracts/bridge/BridgedStandardERC20.sol @@ -1,18 +1,22 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; -import {ERC20PermitUpgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/extensions/draft-ERC20PermitUpgradeable.sol"; -import {UpgradeableBeacon} from "@openzeppelin/contracts/proxy/beacon/UpgradeableBeacon.sol"; -import {ERC1967Upgrade} from "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol"; +import {ERC20PermitUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/token/ERC20/extensions/draft-ERC20PermitUpgradeable.sol"; +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import 
{ERC1967Upgrade} from "@openzeppelin/contracts-v4/proxy/ERC1967/ERC1967Upgrade.sol"; -import {IL2StandardToken} from "./interfaces/IL2StandardToken.sol"; +import {IBridgedStandardToken} from "./interfaces/IBridgedStandardToken.sol"; +import {Unauthorized, NonSequentialVersion, ZeroAddress} from "../common/L1ContractErrors.sol"; +import {L2_NATIVE_TOKEN_VAULT_ADDR} from "../common/L2ContractAddresses.sol"; +import {DataEncoding} from "../common/libraries/DataEncoding.sol"; +import {INativeTokenVault} from "../bridge/ntv/INativeTokenVault.sol"; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev /// @notice The ERC20 token implementation, that is used in the "default" ERC20 bridge. Note, that it does not /// support any custom token logic, i.e. rebase tokens' functionality is not supported. -contract L2StandardERC20 is ERC20PermitUpgradeable, IL2StandardToken, ERC1967Upgrade { +contract BridgedStandardERC20 is ERC20PermitUpgradeable, IBridgedStandardToken, ERC1967Upgrade { /// @dev Describes whether there is a specific getter in the token. /// @notice Used to explicitly separate which getters the token has and which it does not. /// @notice Different tokens in L1 can implement or not implement getter function as `name`/`symbol`/`decimals`, @@ -30,11 +34,45 @@ contract L2StandardERC20 is ERC20PermitUpgradeable, IL2StandardToken, ERC1967Upg /// @notice OpenZeppelin token represents `name` and `symbol` as storage variables and `decimals` as constant. uint8 private decimals_; + /// @notice The l2Bridge now is deprecated, use the L2AssetRouter and L2NativeTokenVault instead. 
/// @dev Address of the L2 bridge that is used as trustee who can mint/burn tokens address public override l2Bridge; - /// @dev Address of the L1 token that can be deposited to mint this L2 token - address public override l1Address; + /// @dev Address of the token on its origin chain that can be deposited to mint this bridged token + address public override originToken; + + /// @dev Address of the native token vault that is used as trustee who can mint/burn tokens + address public nativeTokenVault; + + /// @dev The assetId of the token. + bytes32 public assetId; + + /// @dev This also sets the native token vault to the default value if it is not set. + /// It is not set only on the L2s for legacy tokens. + modifier onlyNTV() { + address ntv = nativeTokenVault; + if (ntv == address(0)) { + ntv = L2_NATIVE_TOKEN_VAULT_ADDR; + nativeTokenVault = L2_NATIVE_TOKEN_VAULT_ADDR; + assetId = DataEncoding.encodeNTVAssetId( + INativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).L1_CHAIN_ID(), + originToken + ); + } + if (msg.sender != ntv) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyNextVersion(uint8 _version) { + // The version should be incremented by 1. Otherwise, the governor risks disabling + // future reinitialization of the token by providing too large a version. + if (_version != _getInitializedVersion() + 1) { + revert NonSequentialVersion(); + } + _; + } /// @dev Contract is expected to be used as proxy implementation. constructor() { @@ -44,20 +82,26 @@ contract L2StandardERC20 is ERC20PermitUpgradeable, IL2StandardToken, ERC1967Upg /// @notice Initializes a contract token for later use. Expected to be used in the proxy. /// @dev Stores the L1 address of the bridge and set `name`/`symbol`/`decimals` getters that L1 token has. - /// @param _l1Address Address of the L1 token that can be deposited to mint this L2 token + /// @param _assetId The assetId of the token. 
+ /// @param _originToken Address of the origin token that can be deposited to mint this bridged token /// @param _data The additional data that the L1 bridge provide for initialization. /// In this case, it is packed `name`/`symbol`/`decimals` of the L1 token. - function bridgeInitialize(address _l1Address, bytes memory _data) external initializer { - require(_l1Address != address(0), "in6"); // Should be non-zero address - l1Address = _l1Address; + function bridgeInitialize( + bytes32 _assetId, + address _originToken, + bytes calldata _data + ) external initializer returns (uint256) { + if (_originToken == address(0)) { + revert ZeroAddress(); + } + originToken = _originToken; + assetId = _assetId; - l2Bridge = msg.sender; + nativeTokenVault = msg.sender; // We parse the data exactly as they were created on the L1 bridge - (bytes memory nameBytes, bytes memory symbolBytes, bytes memory decimalsBytes) = abi.decode( - _data, - (bytes, bytes, bytes) - ); + (uint256 chainId, bytes memory nameBytes, bytes memory symbolBytes, bytes memory decimalsBytes) = DataEncoding + .decodeTokenData(_data); ERC20Getters memory getters; string memory decodedName; @@ -98,7 +142,8 @@ contract L2StandardERC20 is ERC20PermitUpgradeable, IL2StandardToken, ERC1967Upg } availableGetters = getters; - emit BridgeInitialize(_l1Address, decodedName, decodedSymbol, decimals_); + emit BridgeInitialize(_originToken, decodedName, decodedSymbol, decimals_); + return chainId; } /// @notice A method to be called by the governor to update the token's metadata. @@ -110,39 +155,29 @@ contract L2StandardERC20 is ERC20PermitUpgradeable, IL2StandardToken, ERC1967Upg /// to ensure that the governor can not accidentally disable future reinitialization of the token. 
function reinitializeToken( ERC20Getters calldata _availableGetters, - string memory _newName, - string memory _newSymbol, + string calldata _newName, + string calldata _newSymbol, uint8 _version ) external onlyNextVersion(_version) reinitializer(_version) { // It is expected that this token is deployed as a beacon proxy, so we'll // allow the governor of the beacon to reinitialize the token. address beaconAddress = _getBeacon(); - require(msg.sender == UpgradeableBeacon(beaconAddress).owner(), "tt"); + if (msg.sender != UpgradeableBeacon(beaconAddress).owner()) { + revert Unauthorized(msg.sender); + } __ERC20_init_unchained(_newName, _newSymbol); __ERC20Permit_init(_newName); availableGetters = _availableGetters; - emit BridgeInitialize(l1Address, _newName, _newSymbol, decimals_); - } - - modifier onlyBridge() { - require(msg.sender == l2Bridge, "xnt"); // Only L2 bridge can call this method - _; - } - - modifier onlyNextVersion(uint8 _version) { - // The version should be incremented by 1. Otherwise, the governor risks disabling - // future reinitialization of the token by providing too large a version. - require(_version == _getInitializedVersion() + 1, "v"); - _; + emit BridgeInitialize(originToken, _newName, _newSymbol, decimals_); } /// @dev Mint tokens to a given account. /// @param _to The account that will receive the created tokens. /// @param _amount The amount that will be created. /// @notice Should be called by bridge after depositing tokens from L1. - function bridgeMint(address _to, uint256 _amount) external override onlyBridge { + function bridgeMint(address _to, uint256 _amount) external override onlyNTV { _mint(_to, _amount); emit BridgeMint(_to, _amount); } @@ -151,36 +186,49 @@ contract L2StandardERC20 is ERC20PermitUpgradeable, IL2StandardToken, ERC1967Upg /// @param _from The account from which tokens will be burned. /// @param _amount The amount that will be burned. /// @notice Should be called by bridge before withdrawing tokens to L1. 
- function bridgeBurn(address _from, uint256 _amount) external override onlyBridge { + function bridgeBurn(address _from, uint256 _amount) external override onlyNTV { _burn(_from, _amount); emit BridgeBurn(_from, _amount); } + /// @dev External function to decode a string from bytes. + function decodeString(bytes calldata _input) external pure returns (string memory result) { + (result) = abi.decode(_input, (string)); + } + + /// @dev External function to decode a uint8 from bytes. + function decodeUint8(bytes calldata _input) external pure returns (uint8 result) { + (result) = abi.decode(_input, (uint8)); + } + function name() public view override returns (string memory) { // If method is not available, behave like a token that does not implement this method - revert on call. + // solhint-disable-next-line reason-string, gas-custom-errors if (availableGetters.ignoreName) revert(); return super.name(); } function symbol() public view override returns (string memory) { // If method is not available, behave like a token that does not implement this method - revert on call. + // solhint-disable-next-line reason-string, gas-custom-errors if (availableGetters.ignoreSymbol) revert(); return super.symbol(); } function decimals() public view override returns (uint8) { // If method is not available, behave like a token that does not implement this method - revert on call. + // solhint-disable-next-line reason-string, gas-custom-errors if (availableGetters.ignoreDecimals) revert(); return decimals_; } - /// @dev External function to decode a string from bytes. - function decodeString(bytes memory _input) external pure returns (string memory result) { - (result) = abi.decode(_input, (string)); - } + /*////////////////////////////////////////////////////////////// + LEGACY FUNCTIONS + //////////////////////////////////////////////////////////////*/ - /// @dev External function to decode a uint8 from bytes. 
- function decodeUint8(bytes memory _input) external pure returns (uint8 result) { - (result) = abi.decode(_input, (uint8)); + /// @notice Returns the address of the token on its native chain. + /// Legacy for the l2 bridge. + function l1Address() public view override returns (address) { + return originToken; } } diff --git a/l1-contracts/contracts/bridge/L1ERC20Bridge.sol b/l1-contracts/contracts/bridge/L1ERC20Bridge.sol index da3098969..0cd72ccc7 100644 --- a/l1-contracts/contracts/bridge/L1ERC20Bridge.sol +++ b/l1-contracts/contracts/bridge/L1ERC20Bridge.sol @@ -2,46 +2,60 @@ pragma solidity 0.8.24; -import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; -import {SafeERC20} from "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; +import {SafeERC20} from "@openzeppelin/contracts-v4/token/ERC20/utils/SafeERC20.sol"; import {IL1ERC20Bridge} from "./interfaces/IL1ERC20Bridge.sol"; -import {IL1SharedBridge} from "./interfaces/IL1SharedBridge.sol"; +import {IL1Nullifier, FinalizeL1DepositParams} from "./interfaces/IL1Nullifier.sol"; +import {IL1NativeTokenVault} from "./ntv/IL1NativeTokenVault.sol"; +import {IL1AssetRouter} from "./asset-router/IL1AssetRouter.sol"; import {L2ContractHelper} from "../common/libraries/L2ContractHelper.sol"; import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; +import {EmptyDeposit, WithdrawalAlreadyFinalized, TokensWithFeesNotSupported, ETHDepositNotSupported} from "../common/L1ContractErrors.sol"; +import {ETH_TOKEN_ADDRESS} from "../common/Config.sol"; + /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -/// @notice Smart contract that allows depositing ERC20 tokens from Ethereum to hyperchains -/// @dev It is a legacy bridge from zkSync Era, that was deprecated in favour of shared bridge. 
+/// @notice Smart contract that allows depositing ERC20 tokens from Ethereum to ZK chains +/// @dev It is a legacy bridge from ZKsync Era, that was deprecated in favour of shared bridge. /// It is needed for backward compatibility with already integrated projects. contract L1ERC20Bridge is IL1ERC20Bridge, ReentrancyGuard { using SafeERC20 for IERC20; /// @dev The shared bridge that is now used for all bridging, replacing the legacy contract. - IL1SharedBridge public immutable override SHARED_BRIDGE; + IL1Nullifier public immutable override L1_NULLIFIER; + + /// @dev The asset router, which holds deposited tokens. + IL1AssetRouter public immutable override L1_ASSET_ROUTER; + + /// @dev The native token vault, which holds deposited tokens. + IL1NativeTokenVault public immutable override L1_NATIVE_TOKEN_VAULT; + + /// @dev The chainId of Era + uint256 public immutable ERA_CHAIN_ID; /// @dev A mapping L2 batch number => message number => flag. - /// @dev Used to indicate that L2 -> L1 message was already processed for zkSync Era withdrawals. + /// @dev Used to indicate that L2 -> L1 message was already processed for ZKsync Era withdrawals. // slither-disable-next-line uninitialized-state mapping(uint256 l2BatchNumber => mapping(uint256 l2ToL1MessageNumber => bool isFinalized)) public isWithdrawalFinalized; /// @dev A mapping account => L1 token address => L2 deposit transaction hash => amount. - /// @dev Used for saving the number of deposited funds, to claim them in case the deposit transaction will fail in zkSync Era. + /// @dev Used for saving the number of deposited funds, to claim them in case the deposit transaction will fail in ZKsync Era. mapping(address account => mapping(address l1Token => mapping(bytes32 depositL2TxHash => uint256 amount))) public depositAmount; - /// @dev The address that is used as a L2 bridge counterpart in zkSync Era. + /// @dev The address that is used as a L2 bridge counterpart in ZKsync Era. 
// slither-disable-next-line uninitialized-state address public l2Bridge; - /// @dev The address that is used as a beacon for L2 tokens in zkSync Era. + /// @dev The address that is used as a beacon for L2 tokens in ZKsync Era. // slither-disable-next-line uninitialized-state address public l2TokenBeacon; - /// @dev Stores the hash of the L2 token proxy contract's bytecode on zkSync Era. + /// @dev Stores the hash of the L2 token proxy contract's bytecode on ZKsync Era. // slither-disable-next-line uninitialized-state bytes32 public l2TokenProxyBytecodeHash; @@ -56,32 +70,21 @@ contract L1ERC20Bridge is IL1ERC20Bridge, ReentrancyGuard { /// @dev Contract is expected to be used as proxy implementation. /// @dev Initialize the implementation to prevent Parity hack. - constructor(IL1SharedBridge _sharedBridge) reentrancyGuardInitializer { - SHARED_BRIDGE = _sharedBridge; + constructor( + IL1Nullifier _nullifier, + IL1AssetRouter _assetRouter, + IL1NativeTokenVault _nativeTokenVault, + uint256 _eraChainId + ) reentrancyGuardInitializer { + L1_NULLIFIER = _nullifier; + L1_ASSET_ROUTER = _assetRouter; + L1_NATIVE_TOKEN_VAULT = _nativeTokenVault; + ERA_CHAIN_ID = _eraChainId; } /// @dev Initializes the reentrancy guard. Expected to be used in the proxy. function initialize() external reentrancyGuardInitializer {} - /// @dev transfer token to shared bridge as part of upgrade - function transferTokenToSharedBridge(address _token) external { - require(msg.sender == address(SHARED_BRIDGE), "Not shared bridge"); - uint256 amount = IERC20(_token).balanceOf(address(this)); - IERC20(_token).safeTransfer(address(SHARED_BRIDGE), amount); - } - - /*////////////////////////////////////////////////////////////// - ERA LEGACY GETTERS - //////////////////////////////////////////////////////////////*/ - - /// @return The L2 token address that would be minted for deposit of the given L1 token on zkSync Era. 
- function l2TokenAddress(address _l1Token) external view returns (address) { - bytes32 constructorInputHash = keccak256(abi.encode(l2TokenBeacon, "")); - bytes32 salt = bytes32(uint256(uint160(_l1Token))); - - return L2ContractHelper.computeCreate2Address(l2Bridge, salt, l2TokenProxyBytecodeHash, constructorInputHash); - } - /*////////////////////////////////////////////////////////////// ERA LEGACY FUNCTIONS //////////////////////////////////////////////////////////////*/ @@ -115,6 +118,36 @@ contract L1ERC20Bridge is IL1ERC20Bridge, ReentrancyGuard { }); } + /// @notice Finalize the withdrawal and release funds + /// @param _l2BatchNumber The L2 batch number where the withdrawal was processed + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message + /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent + /// @param _message The L2 withdraw data, stored in an L2 -> L1 message + /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization + function finalizeWithdrawal( + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes calldata _message, + bytes32[] calldata _merkleProof + ) external nonReentrant { + if (isWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex]) { + revert WithdrawalAlreadyFinalized(); + } + // We don't need to set finalizeWithdrawal here, as we set it in the shared bridge + + FinalizeL1DepositParams memory finalizeWithdrawalParams = FinalizeL1DepositParams({ + chainId: ERA_CHAIN_ID, + l2BatchNumber: _l2BatchNumber, + l2MessageIndex: _l2MessageIndex, + l2Sender: L1_NULLIFIER.l2BridgeAddress(ERA_CHAIN_ID), + l2TxNumberInBatch: _l2TxNumberInBatch, + message: _message, + merkleProof: _merkleProof + }); + L1_NULLIFIER.finalizeDeposit(finalizeWithdrawalParams); + } + /// @notice Initiates a deposit by locking funds on the contract and sending the request /// @dev Initiates a 
deposit by locking funds on the contract and sending the request /// of processing an L2 transaction where tokens would be minted @@ -148,12 +181,21 @@ contract L1ERC20Bridge is IL1ERC20Bridge, ReentrancyGuard { uint256 _l2TxGasPerPubdataByte, address _refundRecipient ) public payable nonReentrant returns (bytes32 l2TxHash) { - require(_amount != 0, "0T"); // empty deposit - uint256 amount = _depositFundsToSharedBridge(msg.sender, IERC20(_l1Token), _amount); - require(amount == _amount, "3T"); // The token has non-standard transfer logic + if (_amount == 0) { + // empty deposit amount + revert EmptyDeposit(); + } + if (_l1Token == ETH_TOKEN_ADDRESS) { + revert ETHDepositNotSupported(); + } + uint256 amount = _depositFundsToAssetRouter(msg.sender, IERC20(_l1Token), _amount); + if (amount != _amount) { + // The token has non-standard transfer logic + revert TokensWithFeesNotSupported(); + } - l2TxHash = SHARED_BRIDGE.depositLegacyErc20Bridge{value: msg.value}({ - _msgSender: msg.sender, + l2TxHash = L1_ASSET_ROUTER.depositLegacyErc20Bridge{value: msg.value}({ + _originalCaller: msg.sender, _l2Receiver: _l2Receiver, _l1Token: _l1Token, _amount: _amount, @@ -162,16 +204,25 @@ contract L1ERC20Bridge is IL1ERC20Bridge, ReentrancyGuard { _refundRecipient: _refundRecipient }); depositAmount[msg.sender][_l1Token][l2TxHash] = _amount; - // solhint-disable-next-line func-named-parameters - emit DepositInitiated(l2TxHash, msg.sender, _l2Receiver, _l1Token, _amount); + emit DepositInitiated({ + l2DepositTxHash: l2TxHash, + from: msg.sender, + to: _l2Receiver, + l1Token: _l1Token, + amount: _amount + }); } - /// @dev Transfers tokens from the depositor address to the shared bridge address. + /*////////////////////////////////////////////////////////////// + ERA LEGACY FUNCTIONS + //////////////////////////////////////////////////////////////*/ + + /// @dev Transfers tokens from the depositor address to the native token vault address. 
/// @return The difference between the contract balance before and after the transferring of funds. - function _depositFundsToSharedBridge(address _from, IERC20 _token, uint256 _amount) internal returns (uint256) { - uint256 balanceBefore = _token.balanceOf(address(SHARED_BRIDGE)); - _token.safeTransferFrom(_from, address(SHARED_BRIDGE), _amount); - uint256 balanceAfter = _token.balanceOf(address(SHARED_BRIDGE)); + function _depositFundsToAssetRouter(address _from, IERC20 _token, uint256 _amount) internal returns (uint256) { + uint256 balanceBefore = _token.balanceOf(address(L1_ASSET_ROUTER)); + _token.safeTransferFrom(_from, address(L1_ASSET_ROUTER), _amount); + uint256 balanceAfter = _token.balanceOf(address(L1_ASSET_ROUTER)); return balanceAfter - balanceBefore; } @@ -194,10 +245,13 @@ contract L1ERC20Bridge is IL1ERC20Bridge, ReentrancyGuard { bytes32[] calldata _merkleProof ) external nonReentrant { uint256 amount = depositAmount[_depositSender][_l1Token][_l2TxHash]; - require(amount != 0, "2T"); // empty deposit + // empty deposit + if (amount == 0) { + revert EmptyDeposit(); + } delete depositAmount[_depositSender][_l1Token][_l2TxHash]; - SHARED_BRIDGE.claimFailedDepositLegacyErc20Bridge({ + L1_NULLIFIER.claimFailedDepositLegacyErc20Bridge({ _depositSender: _depositSender, _l1Token: _l1Token, _amount: amount, @@ -210,29 +264,14 @@ contract L1ERC20Bridge is IL1ERC20Bridge, ReentrancyGuard { emit ClaimedFailedDeposit(_depositSender, _l1Token, amount); } - /// @notice Finalize the withdrawal and release funds - /// @param _l2BatchNumber The L2 batch number where the withdrawal was processed - /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent - /// @param _message The L2 withdraw data, stored in an L2 -> L1 message - /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about 
withdrawal initialization - function finalizeWithdrawal( - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external nonReentrant { - require(!isWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex], "pw"); - // We don't need to set finalizeWithdrawal here, as we set it in the shared bridge + /*////////////////////////////////////////////////////////////// + ERA LEGACY GETTERS + //////////////////////////////////////////////////////////////*/ - (address l1Receiver, address l1Token, uint256 amount) = SHARED_BRIDGE.finalizeWithdrawalLegacyErc20Bridge({ - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _message: _message, - _merkleProof: _merkleProof - }); - emit WithdrawalFinalized(l1Receiver, l1Token, amount); + /// @return The L2 token address that would be minted for deposit of the given L1 token on ZKsync Era. + function l2TokenAddress(address _l1Token) external view returns (address) { + bytes32 constructorInputHash = keccak256(abi.encode(l2TokenBeacon, "")); + bytes32 salt = bytes32(uint256(uint160(_l1Token))); + return L2ContractHelper.computeCreate2Address(l2Bridge, salt, l2TokenProxyBytecodeHash, constructorInputHash); } } diff --git a/l1-contracts/contracts/bridge/L1Nullifier.sol b/l1-contracts/contracts/bridge/L1Nullifier.sol new file mode 100644 index 000000000..6e624d723 --- /dev/null +++ b/l1-contracts/contracts/bridge/L1Nullifier.sol @@ -0,0 +1,744 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable reason-string, gas-custom-errors + +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/security/PausableUpgradeable.sol"; + +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; +import {SafeERC20} from 
"@openzeppelin/contracts-v4/token/ERC20/utils/SafeERC20.sol"; + +import {NEW_ENCODING_VERSION, LEGACY_ENCODING_VERSION} from "./asset-router/IAssetRouterBase.sol"; +import {IL1NativeTokenVault} from "./ntv/IL1NativeTokenVault.sol"; + +import {IL1ERC20Bridge} from "./interfaces/IL1ERC20Bridge.sol"; +import {IL1AssetRouter} from "./asset-router/IL1AssetRouter.sol"; +import {IAssetRouterBase} from "./asset-router/IAssetRouterBase.sol"; + +import {IL1Nullifier, FinalizeL1DepositParams} from "./interfaces/IL1Nullifier.sol"; + +import {IGetters} from "../state-transition/chain-interfaces/IGetters.sol"; +import {IMailbox} from "../state-transition/chain-interfaces/IMailbox.sol"; +import {L2Message, TxStatus} from "../common/Messaging.sol"; +import {UnsafeBytes} from "../common/libraries/UnsafeBytes.sol"; +import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; +import {ETH_TOKEN_ADDRESS} from "../common/Config.sol"; +import {DataEncoding} from "../common/libraries/DataEncoding.sol"; + +import {IBridgehub} from "../bridgehub/IBridgehub.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, L2_ASSET_ROUTER_ADDR} from "../common/L2ContractAddresses.sol"; +import {DataEncoding} from "../common/libraries/DataEncoding.sol"; +import {Unauthorized, SharedBridgeKey, DepositExists, AddressAlreadySet, InvalidProof, DepositDoesNotExist, SharedBridgeValueNotSet, WithdrawalAlreadyFinalized, L2WithdrawalMessageWrongLength, InvalidSelector, SharedBridgeValueNotSet, ZeroAddress} from "../common/L1ContractErrors.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev Bridges assets between L1 and ZK chain, supporting both ETH and ERC20 tokens. +/// @dev Designed for use with a proxy for upgradability. +contract L1Nullifier is IL1Nullifier, ReentrancyGuard, Ownable2StepUpgradeable, PausableUpgradeable { + using SafeERC20 for IERC20; + + /// @dev Bridgehub smart contract that is used to operate with L2 via asynchronous L2 <-> L1 communication. 
+ IBridgehub public immutable override BRIDGE_HUB; + + /// @dev Era's chainID + uint256 internal immutable ERA_CHAIN_ID; + + /// @dev The address of ZKsync Era diamond proxy contract. + address internal immutable ERA_DIAMOND_PROXY; + + /// @dev Stores the first batch number on the ZKsync Era Diamond Proxy that was settled after Diamond proxy upgrade. + /// This variable is used to differentiate between pre-upgrade and post-upgrade Eth withdrawals. Withdrawals from batches older + /// than this value are considered to have been finalized prior to the upgrade and handled separately. + uint256 internal eraPostDiamondUpgradeFirstBatch; + + /// @dev Stores the first batch number on the ZKsync Era Diamond Proxy that was settled after L1ERC20 Bridge upgrade. + /// This variable is used to differentiate between pre-upgrade and post-upgrade ERC20 withdrawals. Withdrawals from batches older + /// than this value are considered to have been finalized prior to the upgrade and handled separately. + uint256 internal eraPostLegacyBridgeUpgradeFirstBatch; + + /// @dev Stores the ZKsync Era batch number that processes the last deposit tx initiated by the legacy bridge + /// This variable (together with eraLegacyBridgeLastDepositTxNumber) is used to differentiate between pre-upgrade and post-upgrade deposits. Deposits processed in older batches + /// than this value are considered to have been processed prior to the upgrade and handled separately. + /// We use this both for Eth and erc20 token deposits, so we need to update the diamond and bridge simultaneously. + uint256 internal eraLegacyBridgeLastDepositBatch; + + /// @dev The tx number in the _eraLegacyBridgeLastDepositBatch of the last deposit tx initiated by the legacy bridge. + /// This variable (together with eraLegacyBridgeLastDepositBatch) is used to differentiate between pre-upgrade and post-upgrade deposits. 
Deposits processed in older txs + /// than this value are considered to have been processed prior to the upgrade and handled separately. + /// We use this both for Eth and erc20 token deposits, so we need to update the diamond and bridge simultaneously. + uint256 internal eraLegacyBridgeLastDepositTxNumber; + + /// @dev Legacy bridge smart contract that used to hold ERC20 tokens. + IL1ERC20Bridge public override legacyBridge; + + /// @dev A mapping chainId => bridgeProxy. Used to store the bridge proxy's address, and to see if it has been deployed yet. + // slither-disable-next-line uninitialized-state + mapping(uint256 chainId => address l2Bridge) public __DEPRECATED_l2BridgeAddress; + + /// @dev A mapping chainId => L2 deposit transaction hash => dataHash + // keccak256(abi.encode(account, tokenAddress, amount)) for legacy transfers + // keccak256(abi.encode(_originalCaller, assetId, transferData)) for new transfers + /// @dev Tracks deposit transactions to L2 to enable users to claim their funds if a deposit fails. + mapping(uint256 chainId => mapping(bytes32 l2DepositTxHash => bytes32 depositDataHash)) + public + override depositHappened; + + /// @dev Tracks the processing status of L2 to L1 messages, indicating whether a message has already been finalized. + mapping(uint256 chainId => mapping(uint256 l2BatchNumber => mapping(uint256 l2ToL1MessageNumber => bool isFinalized))) + public isWithdrawalFinalized; + + /// @notice Deprecated. Kept for backwards compatibility. + /// @dev Indicates whether the hyperbridging is enabled for a given chain. + // slither-disable-next-line uninitialized-state + mapping(uint256 chainId => bool enabled) private __DEPRECATED_hyperbridgingEnabled; + + /// @dev Maps token balances for each chain to prevent unauthorized spending across ZK chains. + /// This serves as a security measure until hyperbridging is implemented. + /// NOTE: this variable may be removed in the future, don't rely on it! 
+ mapping(uint256 chainId => mapping(address l1Token => uint256 balance)) public __DEPRECATED_chainBalance; + + /// @dev Admin has the ability to register new chains within the shared bridge. + address public __DEPRECATED_admin; + + /// @dev The pending admin, i.e. the candidate to the admin role. + address public __DEPRECATED_pendingAdmin; + + /// @dev Address of L1 asset router. + IL1AssetRouter public l1AssetRouter; + + /// @dev Address of native token vault. + IL1NativeTokenVault public l1NativeTokenVault; + + /// @notice Checks that the message sender is the asset router. + modifier onlyAssetRouter() { + if (msg.sender != address(l1AssetRouter)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks that the message sender is the native token vault. + modifier onlyL1NTV() { + if (msg.sender != address(l1NativeTokenVault)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks that the message sender is the bridgehub or ZKsync Era Diamond Proxy. + modifier onlyBridgehubOrEra(uint256 _chainId) { + if (msg.sender != address(BRIDGE_HUB) && (_chainId != ERA_CHAIN_ID || msg.sender != ERA_DIAMOND_PROXY)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks that the message sender is the legacy bridge. + modifier onlyLegacyBridge() { + if (msg.sender != address(legacyBridge)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks that the message sender is either the asset router or the legacy bridge. + modifier onlyAssetRouterOrErc20Bridge() { + if (msg.sender != address(l1AssetRouter) && msg.sender != address(legacyBridge)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @dev Contract is expected to be used as proxy implementation. + /// @dev Initialize the implementation to prevent Parity hack. 
+ constructor(IBridgehub _bridgehub, uint256 _eraChainId, address _eraDiamondProxy) reentrancyGuardInitializer { + _disableInitializers(); + BRIDGE_HUB = _bridgehub; + ERA_CHAIN_ID = _eraChainId; + ERA_DIAMOND_PROXY = _eraDiamondProxy; + } + + /// @dev Initializes a contract bridge for later use. Expected to be used in the proxy. + /// @dev Used for testing purposes only, as the contract has been initialized on mainnet. + /// @param _owner The address which can change L2 token implementation and upgrade the bridge implementation. + /// The owner is the Governor and separate from the ProxyAdmin from now on, so that the Governor can call the bridge. + /// @param _eraPostDiamondUpgradeFirstBatch The first batch number on the ZKsync Era Diamond Proxy that was settled after diamond proxy upgrade. + /// @param _eraPostLegacyBridgeUpgradeFirstBatch The first batch number on the ZKsync Era Diamond Proxy that was settled after legacy bridge upgrade. + /// @param _eraLegacyBridgeLastDepositBatch The ZKsync Era batch number that processes the last deposit tx initiated by the legacy bridge. + /// @param _eraLegacyBridgeLastDepositTxNumber The tx number in the _eraLegacyBridgeLastDepositBatch of the last deposit tx initiated by the legacy bridge. 
+ function initialize( + address _owner, + uint256 _eraPostDiamondUpgradeFirstBatch, + uint256 _eraPostLegacyBridgeUpgradeFirstBatch, + uint256 _eraLegacyBridgeLastDepositBatch, + uint256 _eraLegacyBridgeLastDepositTxNumber + ) external reentrancyGuardInitializer initializer { + if (_owner == address(0)) { + revert ZeroAddress(); + } + _transferOwnership(_owner); + if (eraPostDiamondUpgradeFirstBatch == 0) { + eraPostDiamondUpgradeFirstBatch = _eraPostDiamondUpgradeFirstBatch; + eraPostLegacyBridgeUpgradeFirstBatch = _eraPostLegacyBridgeUpgradeFirstBatch; + eraLegacyBridgeLastDepositBatch = _eraLegacyBridgeLastDepositBatch; + eraLegacyBridgeLastDepositTxNumber = _eraLegacyBridgeLastDepositTxNumber; + } + } + + /// @notice Transfers tokens from shared bridge to native token vault. + /// @dev This function is part of the upgrade process used to transfer liquidity. + /// @param _token The address of the token to be transferred to NTV. + function transferTokenToNTV(address _token) external onlyL1NTV { + address ntvAddress = address(l1NativeTokenVault); + if (ETH_TOKEN_ADDRESS == _token) { + uint256 amount = address(this).balance; + bool callSuccess; + // Low-level assembly call, to avoid any memory copying (save gas) + assembly { + callSuccess := call(gas(), ntvAddress, amount, 0, 0, 0, 0) + } + require(callSuccess, "L1N: eth transfer failed"); + } else { + IERC20(_token).safeTransfer(ntvAddress, IERC20(_token).balanceOf(address(this))); + } + } + + /// @notice Clears chain balance for specific token. + /// @dev This function is part of the upgrade process used to nullify chain balances once they are credited to NTV. + /// @param _chainId The ID of the ZK chain. + /// @param _token The address of the token which was previously deposit to shared bridge. 
+ function nullifyChainBalanceByNTV(uint256 _chainId, address _token) external { + require(msg.sender == address(l1NativeTokenVault), "L1N: not NTV"); + __DEPRECATED_chainBalance[_chainId][_token] = 0; + } + + /// @notice Legacy function used for migration, do not use! + /// @param _chainId The chain id on which the bridge is deployed. + // slither-disable-next-line uninitialized-state-variables + function l2BridgeAddress(uint256 _chainId) external view returns (address) { + // slither-disable-next-line uninitialized-state-variables + return __DEPRECATED_l2BridgeAddress[_chainId]; + } + + /// @notice Legacy function used for migration, do not use! + /// @param _chainId The chain id we want to get the balance for. + /// @param _token The address of the token. + // slither-disable-next-line uninitialized-state-variables + function chainBalance(uint256 _chainId, address _token) external view returns (uint256) { + // slither-disable-next-line uninitialized-state-variables + return __DEPRECATED_chainBalance[_chainId][_token]; + } + + /// @notice Sets the L1ERC20Bridge contract address. + /// @dev Should be called only once by the owner. + /// @param _legacyBridge The address of the legacy bridge. + function setL1Erc20Bridge(IL1ERC20Bridge _legacyBridge) external onlyOwner { + if (address(legacyBridge) != address(0)) { + revert AddressAlreadySet(address(legacyBridge)); + } + if (address(_legacyBridge) == address(0)) { + revert ZeroAddress(); + } + legacyBridge = _legacyBridge; + } + + /// @notice Sets the nativeTokenVault contract address. + /// @dev Should be called only once by the owner. + /// @param _l1NativeTokenVault The address of the native token vault. 
+ function setL1NativeTokenVault(IL1NativeTokenVault _l1NativeTokenVault) external onlyOwner { + require(address(l1NativeTokenVault) == address(0), "L1N: native token vault already set"); + require(address(_l1NativeTokenVault) != address(0), "L1N: native token vault 0"); + l1NativeTokenVault = _l1NativeTokenVault; + } + + /// @notice Sets the L1 asset router contract address. + /// @dev Should be called only once by the owner. + /// @param _l1AssetRouter The address of the asset router. + function setL1AssetRouter(address _l1AssetRouter) external onlyOwner { + if (address(l1AssetRouter) != address(0)) { + revert AddressAlreadySet(address(_l1AssetRouter)); + } + require(_l1AssetRouter != address(0), "ShB: nullifier 0"); + l1AssetRouter = IL1AssetRouter(_l1AssetRouter); + } + + /// @notice Confirms the acceptance of a transaction by the Mailbox, as part of the L2 transaction process within Bridgehub. + /// This function is utilized by `requestL2TransactionTwoBridges` to validate the execution of a transaction. + /// @param _chainId The chain ID of the ZK chain to which confirm the deposit. + /// @param _txDataHash The keccak256 hash of 0x01 || abi.encode(bytes32, bytes) to identify deposits. + /// @param _txHash The hash of the L1->L2 transaction to confirm the deposit. + function bridgehubConfirmL2TransactionForwarded( + uint256 _chainId, + bytes32 _txDataHash, + bytes32 _txHash + ) external override onlyAssetRouter whenNotPaused { + if (depositHappened[_chainId][_txHash] != 0x00) { + revert DepositExists(); + } + depositHappened[_chainId][_txHash] = _txDataHash; + emit BridgehubDepositFinalized(_chainId, _txDataHash, _txHash); + } + + /// @dev Calls the internal `_encodeTxDataHash`. Used as a wrapped for try / catch case. + /// @dev Encodes the transaction data hash using either the latest encoding standard or the legacy standard. + /// @param _encodingVersion EncodingVersion. + /// @param _originalCaller The address of the entity that initiated the deposit. 
+ /// @param _assetId The unique identifier of the deposited L1 token. + /// @param _transferData The encoded transfer data, which includes both the deposit amount and the address of the L2 receiver. + /// @return txDataHash The resulting encoded transaction data hash. + function encodeTxDataHash( + bytes1 _encodingVersion, + address _originalCaller, + bytes32 _assetId, + bytes calldata _transferData + ) external view returns (bytes32 txDataHash) { + txDataHash = DataEncoding.encodeTxDataHash({ + _encodingVersion: _encodingVersion, + _originalCaller: _originalCaller, + _assetId: _assetId, + _nativeTokenVault: address(l1NativeTokenVault), + _transferData: _transferData + }); + } + + /// @inheritdoc IL1Nullifier + function bridgeRecoverFailedTransfer( + uint256 _chainId, + address _depositSender, + bytes32 _assetId, + bytes memory _assetData, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) public nonReentrant { + _verifyAndClearFailedTransfer({ + _checkedInLegacyBridge: false, + _chainId: _chainId, + _depositSender: _depositSender, + _assetId: _assetId, + _assetData: _assetData, + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _merkleProof + }); + + l1AssetRouter.bridgeRecoverFailedTransfer(_chainId, _depositSender, _assetId, _assetData); + } + + /// @dev Withdraw funds from the initiated deposit, that failed when finalizing on L2. + /// @param _chainId The ZK chain id to which deposit was initiated. + /// @param _depositSender The address of the entity that initiated the deposit. + /// @param _assetId The unique identifier of the deposited L1 token. + /// @param _assetData The encoded data, which is used by the asset handler to determine L2 recipient and amount. Might include extra information. 
+ /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization. + /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed. + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message. + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent. + /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization. + /// @dev Processes claims of failed deposit, whether they originated from the legacy bridge or the current system. + function _verifyAndClearFailedTransfer( + bool _checkedInLegacyBridge, + uint256 _chainId, + address _depositSender, + bytes32 _assetId, + bytes memory _assetData, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) internal whenNotPaused { + { + bool proofValid = BRIDGE_HUB.proveL1ToL2TransactionStatus({ + _chainId: _chainId, + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _merkleProof, + _status: TxStatus.Failure + }); + if (!proofValid) { + revert InvalidProof(); + } + } + + bool notCheckedInLegacyBridgeOrWeCanCheckDeposit; + { + // Deposits that happened before the upgrade cannot be checked here, they have to be claimed and checked in the legacyBridge + bool weCanCheckDepositHere = !_isPreSharedBridgeDepositOnEra(_chainId, _l2BatchNumber, _l2TxNumberInBatch); + // Double claims are not possible, as depositHappened is checked here for all except legacy deposits (which have to happen through the legacy bridge) + // Funds claimed before the update will still be recorded in the legacy bridge + // Note we double check NEW deposits if they are called from the legacy bridge + notCheckedInLegacyBridgeOrWeCanCheckDeposit = (!_checkedInLegacyBridge) || weCanCheckDepositHere; + 
} + + if (notCheckedInLegacyBridgeOrWeCanCheckDeposit) { + bytes32 dataHash = depositHappened[_chainId][_l2TxHash]; + // Determine if the given dataHash matches the calculated legacy transaction hash. + bool isLegacyTxDataHash = _isLegacyTxDataHash(_depositSender, _assetId, _assetData, dataHash); + // If the dataHash matches the legacy transaction hash, skip the next step. + // Otherwise, perform the check using the new transaction data hash encoding. + if (!isLegacyTxDataHash) { + bytes32 txDataHash = DataEncoding.encodeTxDataHash({ + _encodingVersion: NEW_ENCODING_VERSION, + _originalCaller: _depositSender, + _assetId: _assetId, + _nativeTokenVault: address(l1NativeTokenVault), + _transferData: _assetData + }); + if (dataHash != txDataHash) { + revert DepositDoesNotExist(); + } + } + } + delete depositHappened[_chainId][_l2TxHash]; + } + + /// @notice Finalize the withdrawal and release funds. + /// @param _finalizeWithdrawalParams The structure that holds all necessary data to finalize withdrawal + /// @dev We have both the legacy finalizeWithdrawal and the new finalizeDeposit functions, + /// finalizeDeposit uses the new format. On the L2 we have finalizeDeposit with new and old formats both. + function finalizeDeposit(FinalizeL1DepositParams calldata _finalizeWithdrawalParams) external { + _finalizeDeposit(_finalizeWithdrawalParams); + } + + /// @notice Internal function that handles the logic for finalizing withdrawals, supporting both the current bridge system and the legacy ERC20 bridge. 
+ /// @param _finalizeWithdrawalParams The structure that holds all necessary data to finalize withdrawal + function _finalizeDeposit( + FinalizeL1DepositParams calldata _finalizeWithdrawalParams + ) internal nonReentrant whenNotPaused { + uint256 chainId = _finalizeWithdrawalParams.chainId; + uint256 l2BatchNumber = _finalizeWithdrawalParams.l2BatchNumber; + uint256 l2MessageIndex = _finalizeWithdrawalParams.l2MessageIndex; + if (isWithdrawalFinalized[chainId][l2BatchNumber][l2MessageIndex]) { + revert WithdrawalAlreadyFinalized(); + } + isWithdrawalFinalized[chainId][l2BatchNumber][l2MessageIndex] = true; + + // Handling special case for withdrawal from ZKsync Era initiated before Shared Bridge. + (bytes32 assetId, bytes memory transferData) = _verifyWithdrawal(_finalizeWithdrawalParams); + + // Handling special case for withdrawal from zkSync Era initiated before Shared Bridge. + if (_isPreSharedBridgeEraEthWithdrawal(chainId, l2BatchNumber)) { + // Checks that the withdrawal wasn't finalized already. + bool alreadyFinalized = IGetters(ERA_DIAMOND_PROXY).isEthWithdrawalFinalized(l2BatchNumber, l2MessageIndex); + require(!alreadyFinalized, "L1N: Withdrawal is already finalized 2"); + } + if (_isPreSharedBridgeEraTokenWithdrawal(chainId, l2BatchNumber)) { + require(!legacyBridge.isWithdrawalFinalized(l2BatchNumber, l2MessageIndex), "L1N: legacy withdrawal"); + } + + l1AssetRouter.finalizeDeposit(chainId, assetId, transferData); + } + + /// @dev Determines if an eth withdrawal was initiated on ZKsync Era before the upgrade to the Shared Bridge. + /// @param _chainId The chain ID of the transaction to check. + /// @param _l2BatchNumber The L2 batch number for the withdrawal. + /// @return Whether withdrawal was initiated on ZKsync Era before diamond proxy upgrade. 
+ function _isPreSharedBridgeEraEthWithdrawal(uint256 _chainId, uint256 _l2BatchNumber) internal view returns (bool) { + if ((_chainId == ERA_CHAIN_ID) && eraPostDiamondUpgradeFirstBatch == 0) { + revert SharedBridgeValueNotSet(SharedBridgeKey.PostUpgradeFirstBatch); + } + return (_chainId == ERA_CHAIN_ID) && (_l2BatchNumber < eraPostDiamondUpgradeFirstBatch); + } + + /// @dev Determines if a token withdrawal was initiated on ZKsync Era before the upgrade to the Shared Bridge. + /// @param _chainId The chain ID of the transaction to check. + /// @param _l2BatchNumber The L2 batch number for the withdrawal. + /// @return Whether withdrawal was initiated on ZKsync Era before Legacy Bridge upgrade. + function _isPreSharedBridgeEraTokenWithdrawal( + uint256 _chainId, + uint256 _l2BatchNumber + ) internal view returns (bool) { + if ((_chainId == ERA_CHAIN_ID) && eraPostLegacyBridgeUpgradeFirstBatch == 0) { + revert SharedBridgeValueNotSet(SharedBridgeKey.LegacyBridgeFirstBatch); + } + return (_chainId == ERA_CHAIN_ID) && (_l2BatchNumber < eraPostLegacyBridgeUpgradeFirstBatch); + } + + /// @dev Determines if the provided data for a failed deposit corresponds to a legacy failed deposit. + /// @param _depositSender The address of the entity that initiated the deposit. + /// @param _assetId The unique identifier of the deposited L1 token. + /// @param _transferData The encoded transfer data, which includes both the deposit amount and the address of the L2 receiver. + /// @param _expectedTxDataHash The nullifier data hash stored for the failed deposit. + /// @return isLegacyTxDataHash True if the transaction is legacy, false otherwise. 
+ function _isLegacyTxDataHash( + address _depositSender, + bytes32 _assetId, + bytes memory _transferData, + bytes32 _expectedTxDataHash + ) internal view returns (bool isLegacyTxDataHash) { + try this.encodeTxDataHash(LEGACY_ENCODING_VERSION, _depositSender, _assetId, _transferData) returns ( + bytes32 txDataHash + ) { + return txDataHash == _expectedTxDataHash; + } catch { + return false; + } + } + + /// @dev Determines if a deposit was initiated on ZKsync Era before the upgrade to the Shared Bridge. + /// @param _chainId The chain ID of the transaction to check. + /// @param _l2BatchNumber The L2 batch number for the deposit where it was processed. + /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the deposit was processed. + /// @return Whether deposit was initiated on ZKsync Era before Shared Bridge upgrade. + function _isPreSharedBridgeDepositOnEra( + uint256 _chainId, + uint256 _l2BatchNumber, + uint256 _l2TxNumberInBatch + ) internal view returns (bool) { + if ((_chainId == ERA_CHAIN_ID) && (eraLegacyBridgeLastDepositBatch == 0)) { + revert SharedBridgeValueNotSet(SharedBridgeKey.LegacyBridgeLastDepositBatch); + } + return + (_chainId == ERA_CHAIN_ID) && + (_l2BatchNumber < eraLegacyBridgeLastDepositBatch || + (_l2TxNumberInBatch <= eraLegacyBridgeLastDepositTxNumber && + _l2BatchNumber == eraLegacyBridgeLastDepositBatch)); + } + + /// @notice Verifies the validity of a withdrawal message from L2 and returns withdrawal details. + /// @param _finalizeWithdrawalParams The structure that holds all necessary data to finalize withdrawal + /// @return assetId The ID of the bridged asset. + /// @return transferData The transfer data used to finalize withdawal. 
+ function _verifyWithdrawal( + FinalizeL1DepositParams calldata _finalizeWithdrawalParams + ) internal returns (bytes32 assetId, bytes memory transferData) { + (assetId, transferData) = _parseL2WithdrawalMessage( + _finalizeWithdrawalParams.chainId, + _finalizeWithdrawalParams.message + ); + L2Message memory l2ToL1Message; + { + address l2Sender = _finalizeWithdrawalParams.l2Sender; + bool baseTokenWithdrawal = (assetId == BRIDGE_HUB.baseTokenAssetId(_finalizeWithdrawalParams.chainId)); + require( + /// @dev for legacy function calls we hardcode the sender as the L2AssetRouter as we don't know if it is + /// a base token or erc20 token withdrawal beforehand, + /// so we have to allow that option even if we override it. + l2Sender == L2_ASSET_ROUTER_ADDR || + l2Sender == L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR || + l2Sender == __DEPRECATED_l2BridgeAddress[_finalizeWithdrawalParams.chainId], + "L1N: wrong l2 sender" + ); + + l2ToL1Message = L2Message({ + txNumberInBatch: _finalizeWithdrawalParams.l2TxNumberInBatch, + sender: baseTokenWithdrawal ? L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR : l2Sender, + data: _finalizeWithdrawalParams.message + }); + } + + bool success = BRIDGE_HUB.proveL2MessageInclusion({ + _chainId: _finalizeWithdrawalParams.chainId, + _batchNumber: _finalizeWithdrawalParams.l2BatchNumber, + _index: _finalizeWithdrawalParams.l2MessageIndex, + _message: l2ToL1Message, + _proof: _finalizeWithdrawalParams.merkleProof + }); + // withdrawal wrong proof + if (!success) { + revert InvalidProof(); + } + } + + /// @notice Parses the withdrawal message and returns withdrawal details. + /// @dev Currently, 3 different encoding versions are supported: legacy mailbox withdrawal, ERC20 bridge withdrawal, + /// @dev and the latest version supported by shared bridge. Selectors are used for versioning. + /// @param _chainId The ZK chain ID. + /// @param _l2ToL1message The encoded L2 -> L1 message. + /// @return assetId The ID of the bridged asset. 
+ /// @return transferData The transfer data used to finalize withdawal. + function _parseL2WithdrawalMessage( + uint256 _chainId, + bytes memory _l2ToL1message + ) internal returns (bytes32 assetId, bytes memory transferData) { + // Please note that there are three versions of the message: + // 1. The message that is sent from `L2BaseToken` to withdraw base token. + // 2. The message that is sent from L2 Legacy Shared Bridge to withdraw ERC20 tokens or base token. + // 3. The message that is sent from L2 Asset Router to withdraw ERC20 tokens or base token. + + uint256 amount; + address l1Receiver; + + (uint32 functionSignature, uint256 offset) = UnsafeBytes.readUint32(_l2ToL1message, 0); + if (bytes4(functionSignature) == IMailbox.finalizeEthWithdrawal.selector) { + // The data is expected to be at least 56 bytes long. + if (_l2ToL1message.length < 56) { + revert L2WithdrawalMessageWrongLength(_l2ToL1message.length); + } + // this message is a base token withdrawal + (l1Receiver, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset); + // slither-disable-next-line unused-return + (amount, ) = UnsafeBytes.readUint256(_l2ToL1message, offset); + assetId = BRIDGE_HUB.baseTokenAssetId(_chainId); + address baseToken = BRIDGE_HUB.baseToken(_chainId); + transferData = DataEncoding.encodeBridgeMintData({ + _originalCaller: address(0), + _l2Receiver: l1Receiver, + _l1Token: baseToken, + _amount: amount, + _erc20Metadata: new bytes(0) + }); + } else if (bytes4(functionSignature) == IL1ERC20Bridge.finalizeWithdrawal.selector) { + // this message is a token withdrawal + + // Check that the message length is correct. + // It should be equal to the length of the function signature + address + address + uint256 = 4 + 20 + 20 + 32 = + // 76 (bytes). 
+ if (_l2ToL1message.length != 76) { + revert L2WithdrawalMessageWrongLength(_l2ToL1message.length); + } + (l1Receiver, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset); + // We use the IL1ERC20Bridge for backward compatibility with old withdrawals. + address l1Token; + (l1Token, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset); + // slither-disable-next-line unused-return + (amount, ) = UnsafeBytes.readUint256(_l2ToL1message, offset); + + l1NativeTokenVault.ensureTokenIsRegistered(l1Token); + assetId = DataEncoding.encodeNTVAssetId(block.chainid, l1Token); + transferData = DataEncoding.encodeBridgeMintData({ + _originalCaller: address(0), + _l2Receiver: l1Receiver, + _l1Token: l1Token, + _amount: amount, + _erc20Metadata: new bytes(0) + }); + } else if (bytes4(functionSignature) == IAssetRouterBase.finalizeDeposit.selector) { + // The data is expected to be at least 36 bytes long to contain assetId. + require(_l2ToL1message.length >= 36, "L1N: wrong msg len"); // wrong message length + // slither-disable-next-line unused-return + (, offset) = UnsafeBytes.readUint256(_l2ToL1message, offset); // originChainId, not used for L2->L1 txs + (assetId, offset) = UnsafeBytes.readBytes32(_l2ToL1message, offset); + transferData = UnsafeBytes.readRemainingBytes(_l2ToL1message, offset); + } else { + revert InvalidSelector(bytes4(functionSignature)); + } + } + + /*////////////////////////////////////////////////////////////// + SHARED BRIDGE TOKEN BRIDGING LEGACY FUNCTIONS + //////////////////////////////////////////////////////////////*/ + + /// @dev Withdraw funds from the initiated deposit, that failed when finalizing on L2. + /// @param _depositSender The address of the deposit initiator. + /// @param _l1Token The address of the deposited L1 ERC20 token. + /// @param _amount The amount of the deposit that failed. + /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization. 
+ /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed. + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message. + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent. + /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization. + function claimFailedDeposit( + uint256 _chainId, + address _depositSender, + address _l1Token, + uint256 _amount, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external override { + bytes32 assetId = l1NativeTokenVault.assetId(_l1Token); + if (assetId == bytes32(0)) { + assetId = DataEncoding.encodeNTVAssetId(block.chainid, _l1Token); + } + // For legacy deposits, the l2 receiver is not required to check tx data hash + // bytes memory transferData = abi.encode(_amount, _depositSender); + bytes memory assetData = abi.encode(_amount, address(0)); + + _verifyAndClearFailedTransfer({ + _checkedInLegacyBridge: false, + _depositSender: _depositSender, + _chainId: _chainId, + _assetId: assetId, + _assetData: assetData, + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _merkleProof + }); + + l1AssetRouter.bridgeRecoverFailedTransfer({ + _chainId: _chainId, + _depositSender: _depositSender, + _assetId: assetId, + _assetData: assetData + }); + } + + /*////////////////////////////////////////////////////////////// + ERA ERC20 LEGACY FUNCTIONS + //////////////////////////////////////////////////////////////*/ + + /// @notice Withdraw funds from the initiated deposit, that failed when finalizing on ZKsync Era chain. + /// This function is specifically designed for maintaining backward-compatibility with legacy `claimFailedDeposit` + /// method in `L1ERC20Bridge`. 
+ /// + /// @param _depositSender The address of the deposit initiator. + /// @param _l1Asset The address of the deposited L1 ERC20 token. + /// @param _amount The amount of the deposit that failed. + /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization. + /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed. + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message. + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent. + /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization. + function claimFailedDepositLegacyErc20Bridge( + address _depositSender, + address _l1Asset, + uint256 _amount, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external override onlyLegacyBridge { + bytes memory assetData = abi.encode(_amount, _depositSender); + /// the legacy bridge can only be used with L1 native tokens. + bytes32 assetId = DataEncoding.encodeNTVAssetId(block.chainid, _l1Asset); + + _verifyAndClearFailedTransfer({ + _checkedInLegacyBridge: true, + _depositSender: _depositSender, + _chainId: ERA_CHAIN_ID, + _assetId: assetId, + _assetData: assetData, + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _merkleProof + }); + + l1AssetRouter.bridgeRecoverFailedTransfer({ + _chainId: ERA_CHAIN_ID, + _depositSender: _depositSender, + _assetId: assetId, + _assetData: assetData + }); + } + + /*////////////////////////////////////////////////////////////// + PAUSE + //////////////////////////////////////////////////////////////*/ + + /// @notice Pauses all functions marked with the `whenNotPaused` modifier. 
+ function pause() external onlyOwner { + _pause(); + } + + /// @notice Unpauses the contract, allowing all functions marked with the `whenNotPaused` modifier to be called again. + function unpause() external onlyOwner { + _unpause(); + } +} diff --git a/l1-contracts/contracts/bridge/L1SharedBridge.sol b/l1-contracts/contracts/bridge/L1SharedBridge.sol deleted file mode 100644 index 42058fe33..000000000 --- a/l1-contracts/contracts/bridge/L1SharedBridge.sol +++ /dev/null @@ -1,866 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable/access/Ownable2StepUpgradeable.sol"; -import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable/security/PausableUpgradeable.sol"; - -import {IERC20Metadata} from "@openzeppelin/contracts/token/ERC20/extensions/IERC20Metadata.sol"; -import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; -import {SafeERC20} from "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; - -import {IL1ERC20Bridge} from "./interfaces/IL1ERC20Bridge.sol"; -import {IL1SharedBridge} from "./interfaces/IL1SharedBridge.sol"; -import {IL2Bridge} from "./interfaces/IL2Bridge.sol"; - -import {IMailbox} from "../state-transition/chain-interfaces/IMailbox.sol"; -import {L2Message, TxStatus} from "../common/Messaging.sol"; -import {UnsafeBytes} from "../common/libraries/UnsafeBytes.sol"; -import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; -import {AddressAliasHelper} from "../vendor/AddressAliasHelper.sol"; -import {ETH_TOKEN_ADDRESS, TWO_BRIDGES_MAGIC_VALUE} from "../common/Config.sol"; -import {IBridgehub, L2TransactionRequestTwoBridgesInner, L2TransactionRequestDirect} from "../bridgehub/IBridgehub.sol"; -import {IGetters} from "../state-transition/chain-interfaces/IGetters.sol"; -import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "../common/L2ContractAddresses.sol"; - -/// @author Matter Labs -/// @custom:security-contact 
security@matterlabs.dev -/// @dev Bridges assets between L1 and hyperchains, supporting both ETH and ERC20 tokens. -/// @dev Designed for use with a proxy for upgradability. -contract L1SharedBridge is IL1SharedBridge, ReentrancyGuard, Ownable2StepUpgradeable, PausableUpgradeable { - using SafeERC20 for IERC20; - - /// @dev The address of the WETH token on L1. - address public immutable override L1_WETH_TOKEN; - - /// @dev Bridgehub smart contract that is used to operate with L2 via asynchronous L2 <-> L1 communication. - IBridgehub public immutable override BRIDGE_HUB; - - /// @dev Era's chainID - uint256 public immutable ERA_CHAIN_ID; - - /// @dev The address of zkSync Era diamond proxy contract. - address public immutable ERA_DIAMOND_PROXY; - - /// @dev Stores the first batch number on the zkSync Era Diamond Proxy that was settled after Diamond proxy upgrade. - /// This variable is used to differentiate between pre-upgrade and post-upgrade Eth withdrawals. Withdrawals from batches older - /// than this value are considered to have been finalized prior to the upgrade and handled separately. - uint256 internal eraPostDiamondUpgradeFirstBatch; - - /// @dev Stores the first batch number on the zkSync Era Diamond Proxy that was settled after L1ERC20 Bridge upgrade. - /// This variable is used to differentiate between pre-upgrade and post-upgrade ERC20 withdrawals. Withdrawals from batches older - /// than this value are considered to have been finalized prior to the upgrade and handled separately. - uint256 internal eraPostLegacyBridgeUpgradeFirstBatch; - - /// @dev Stores the zkSync Era batch number that processes the last deposit tx initiated by the legacy bridge - /// This variable (together with eraLegacyBridgeLastDepositTxNumber) is used to differentiate between pre-upgrade and post-upgrade deposits. Deposits processed in older batches - /// than this value are considered to have been processed prior to the upgrade and handled separately. 
- /// We use this both for Eth and erc20 token deposits, so we need to update the diamond and bridge simultaneously. - uint256 internal eraLegacyBridgeLastDepositBatch; - - /// @dev The tx number in the _eraLegacyBridgeLastDepositBatch of the last deposit tx initiated by the legacy bridge - /// This variable (together with eraLegacyBridgeLastDepositBatch) is used to differentiate between pre-upgrade and post-upgrade deposits. Deposits processed in older txs - /// than this value are considered to have been processed prior to the upgrade and handled separately. - /// We use this both for Eth and erc20 token deposits, so we need to update the diamond and bridge simultaneously. - uint256 internal eraLegacyBridgeLastDepositTxNumber; - - /// @dev Legacy bridge smart contract that used to hold ERC20 tokens. - IL1ERC20Bridge public override legacyBridge; - - /// @dev A mapping chainId => bridgeProxy. Used to store the bridge proxy's address, and to see if it has been deployed yet. - mapping(uint256 chainId => address l2Bridge) public override l2BridgeAddress; - - /// @dev A mapping chainId => L2 deposit transaction hash => keccak256(abi.encode(account, tokenAddress, amount)) - /// @dev Tracks deposit transactions from L2 to enable users to claim their funds if a deposit fails. - mapping(uint256 chainId => mapping(bytes32 l2DepositTxHash => bytes32 depositDataHash)) - public - override depositHappened; - - /// @dev Tracks the processing status of L2 to L1 messages, indicating whether a message has already been finalized. - mapping(uint256 chainId => mapping(uint256 l2BatchNumber => mapping(uint256 l2ToL1MessageNumber => bool isFinalized))) - public isWithdrawalFinalized; - - /// @dev Indicates whether the hyperbridging is enabled for a given chain. - // slither-disable-next-line uninitialized-state - mapping(uint256 chainId => bool enabled) internal hyperbridgingEnabled; - - /// @dev Maps token balances for each chain to prevent unauthorized spending across hyperchains. 
- /// This serves as a security measure until hyperbridging is implemented. - /// NOTE: this function may be removed in the future, don't rely on it! - mapping(uint256 chainId => mapping(address l1Token => uint256 balance)) public chainBalance; - - /// @dev Admin has the ability to register new chains within the shared bridge. - address public admin; - - /// @dev The pending admin, i.e. the candidate to the admin role. - address public pendingAdmin; - - /// @notice Checks that the message sender is the bridgehub. - modifier onlyBridgehub() { - require(msg.sender == address(BRIDGE_HUB), "ShB not BH"); - _; - } - - /// @notice Checks that the message sender is the bridgehub or zkSync Era Diamond Proxy. - modifier onlyBridgehubOrEra(uint256 _chainId) { - require( - msg.sender == address(BRIDGE_HUB) || (_chainId == ERA_CHAIN_ID && msg.sender == ERA_DIAMOND_PROXY), - "L1SharedBridge: not bridgehub or era chain" - ); - _; - } - - /// @notice Checks that the message sender is the legacy bridge. - modifier onlyLegacyBridge() { - require(msg.sender == address(legacyBridge), "ShB not legacy bridge"); - _; - } - - /// @notice Checks that the message sender is the shared bridge itself. - modifier onlySelf() { - require(msg.sender == address(this), "ShB not shared bridge"); - _; - } - - /// @notice Checks that the message sender is either the owner or admin. - modifier onlyOwnerOrAdmin() { - require(msg.sender == owner() || msg.sender == admin, "ShB not owner or admin"); - _; - } - - /// @dev Contract is expected to be used as proxy implementation. - /// @dev Initialize the implementation to prevent Parity hack. - constructor( - address _l1WethAddress, - IBridgehub _bridgehub, - uint256 _eraChainId, - address _eraDiamondProxy - ) reentrancyGuardInitializer { - _disableInitializers(); - L1_WETH_TOKEN = _l1WethAddress; - BRIDGE_HUB = _bridgehub; - ERA_CHAIN_ID = _eraChainId; - ERA_DIAMOND_PROXY = _eraDiamondProxy; - } - - /// @dev Initializes a contract bridge for later use. 
Expected to be used in the proxy - /// @param _owner Address which can change L2 token implementation and upgrade the bridge - /// implementation. The owner is the Governor and separate from the ProxyAdmin from now on, so that the Governor can call the bridge. - function initialize(address _owner) external reentrancyGuardInitializer initializer { - require(_owner != address(0), "ShB owner 0"); - _transferOwnership(_owner); - } - - /// @inheritdoc IL1SharedBridge - /// @dev Please note, if the owner wants to enforce the admin change it must execute both `setPendingAdmin` and - /// `acceptAdmin` atomically. Otherwise `admin` can set different pending admin and so fail to accept the admin rights. - function setPendingAdmin(address _newPendingAdmin) external onlyOwnerOrAdmin { - // Save previous value into the stack to put it into the event later - address oldPendingAdmin = pendingAdmin; - // Change pending admin - pendingAdmin = _newPendingAdmin; - emit NewPendingAdmin(oldPendingAdmin, _newPendingAdmin); - } - - /// @inheritdoc IL1SharedBridge - /// @notice Accepts transfer of admin rights. Only pending admin can accept the role. - function acceptAdmin() external { - address currentPendingAdmin = pendingAdmin; - require(msg.sender == currentPendingAdmin, "ShB not pending admin"); // Only proposed by current admin address can claim the admin rights - - address previousAdmin = admin; - admin = currentPendingAdmin; - delete pendingAdmin; - - emit NewPendingAdmin(currentPendingAdmin, address(0)); - emit NewAdmin(previousAdmin, currentPendingAdmin); - } - - /// @dev This sets the first post diamond upgrade batch for era, used to check old eth withdrawals - /// @param _eraPostDiamondUpgradeFirstBatch The first batch number on the zkSync Era Diamond Proxy that was settled after diamond proxy upgrade. 
- function setEraPostDiamondUpgradeFirstBatch(uint256 _eraPostDiamondUpgradeFirstBatch) external onlyOwner { - require(eraPostDiamondUpgradeFirstBatch == 0, "ShB: eFPUB already set"); - eraPostDiamondUpgradeFirstBatch = _eraPostDiamondUpgradeFirstBatch; - } - - /// @dev This sets the first post upgrade batch for era, used to check old token withdrawals - /// @param _eraPostLegacyBridgeUpgradeFirstBatch The first batch number on the zkSync Era Diamond Proxy that was settled after legacy bridge upgrade. - function setEraPostLegacyBridgeUpgradeFirstBatch(uint256 _eraPostLegacyBridgeUpgradeFirstBatch) external onlyOwner { - require(eraPostLegacyBridgeUpgradeFirstBatch == 0, "ShB: eFPUB already set"); - eraPostLegacyBridgeUpgradeFirstBatch = _eraPostLegacyBridgeUpgradeFirstBatch; - } - - /// @dev This sets the first post upgrade batch for era, used to check old withdrawals - /// @param _eraLegacyBridgeLastDepositBatch The the zkSync Era batch number that processes the last deposit tx initiated by the legacy bridge - /// @param _eraLegacyBridgeLastDepositTxNumber The tx number in the _eraLegacyBridgeLastDepositBatch of the last deposit tx initiated by the legacy bridge - function setEraLegacyBridgeLastDepositTime( - uint256 _eraLegacyBridgeLastDepositBatch, - uint256 _eraLegacyBridgeLastDepositTxNumber - ) external onlyOwner { - require(eraLegacyBridgeLastDepositBatch == 0, "ShB: eLOBDB already set"); - require(eraLegacyBridgeLastDepositTxNumber == 0, "ShB: eLOBDTN already set"); - eraLegacyBridgeLastDepositBatch = _eraLegacyBridgeLastDepositBatch; - eraLegacyBridgeLastDepositTxNumber = _eraLegacyBridgeLastDepositTxNumber; - } - - /// @dev transfer tokens from legacy erc20 bridge or mailbox and set chainBalance as part of migration process - function transferFundsFromLegacy(address _token, address _target, uint256 _targetChainId) external onlySelf { - if (_token == ETH_TOKEN_ADDRESS) { - uint256 balanceBefore = address(this).balance; - 
IMailbox(_target).transferEthToSharedBridge(); - uint256 balanceAfter = address(this).balance; - require(balanceAfter > balanceBefore, "ShB: 0 eth transferred"); - chainBalance[_targetChainId][ETH_TOKEN_ADDRESS] = - chainBalance[_targetChainId][ETH_TOKEN_ADDRESS] + - balanceAfter - - balanceBefore; - } else { - uint256 balanceBefore = IERC20(_token).balanceOf(address(this)); - uint256 legacyBridgeBalance = IERC20(_token).balanceOf(address(legacyBridge)); - require(legacyBridgeBalance > 0, "ShB: 0 amount to transfer"); - IL1ERC20Bridge(_target).transferTokenToSharedBridge(_token); - uint256 balanceAfter = IERC20(_token).balanceOf(address(this)); - require(balanceAfter - balanceBefore >= legacyBridgeBalance, "ShB: wrong amount transferred"); - chainBalance[_targetChainId][_token] = chainBalance[_targetChainId][_token] + legacyBridgeBalance; - } - } - - /// @dev transfer tokens from legacy erc20 bridge or mailbox and set chainBalance as part of migration process. - /// @dev Unlike `transferFundsFromLegacy` is provides a concrete limit on the gas used for the transfer and even if it will fail, it will not revert the whole transaction. - function safeTransferFundsFromLegacy( - address _token, - address _target, - uint256 _targetChainId, - uint256 _gasPerToken - ) external onlyOwner { - try this.transferFundsFromLegacy{gas: _gasPerToken}(_token, _target, _targetChainId) {} catch { - // A reasonable amount of gas will be provided to transfer the token. - // If the transfer fails, we don't want to revert the whole transaction. - } - } - - function receiveEth(uint256 _chainId) external payable { - require(BRIDGE_HUB.getHyperchain(_chainId) == msg.sender, "receiveEth not state transition"); - } - - /// @dev Initializes the l2Bridge address by governance for a specific chain. - /// @param _chainId The chain ID for which the l2Bridge address is being initialized. - /// @param _l2BridgeAddress The address of the L2 bridge contract. 
- function initializeChainGovernance(uint256 _chainId, address _l2BridgeAddress) external onlyOwnerOrAdmin { - require(l2BridgeAddress[_chainId] == address(0), "ShB: l2 bridge already set"); - require(_l2BridgeAddress != address(0), "ShB: l2 bridge 0"); - l2BridgeAddress[_chainId] = _l2BridgeAddress; - } - - /// @dev Reinitializes the l2Bridge address by governance for a specific chain. - /// @dev Only accessible to the owner of the bridge to prevent malicious admin from changing the bridge address for - /// an existing chain. - /// @param _chainId The chain ID for which the l2Bridge address is being initialized. - /// @param _l2BridgeAddress The address of the L2 bridge contract. - function reinitializeChainGovernance(uint256 _chainId, address _l2BridgeAddress) external onlyOwner { - require(l2BridgeAddress[_chainId] != address(0), "ShB: l2 bridge not yet set"); - l2BridgeAddress[_chainId] = _l2BridgeAddress; - } - - /// @notice Allows bridgehub to acquire mintValue for L1->L2 transactions. - /// @dev If the corresponding L2 transaction fails, refunds are issued to a refund recipient on L2. - function bridgehubDepositBaseToken( - uint256 _chainId, - address _prevMsgSender, - address _l1Token, - uint256 _amount - ) external payable virtual onlyBridgehubOrEra(_chainId) whenNotPaused { - if (_l1Token == ETH_TOKEN_ADDRESS) { - require(msg.value == _amount, "L1SharedBridge: msg.value not equal to amount"); - } else { - // The Bridgehub also checks this, but we want to be sure - require(msg.value == 0, "ShB m.v > 0 b d.it"); - - uint256 amount = _depositFunds(_prevMsgSender, IERC20(_l1Token), _amount); // note if _prevMsgSender is this contract, this will return 0. This does not happen. 
- require(amount == _amount, "3T"); // The token has non-standard transfer logic - } - - if (!hyperbridgingEnabled[_chainId]) { - chainBalance[_chainId][_l1Token] += _amount; - } - // Note that we don't save the deposited amount, as this is for the base token, which gets sent to the refundRecipient if the tx fails - emit BridgehubDepositBaseTokenInitiated(_chainId, _prevMsgSender, _l1Token, _amount); - } - - /// @dev Transfers tokens from the depositor address to the smart contract address. - /// @return The difference between the contract balance before and after the transferring of funds. - function _depositFunds(address _from, IERC20 _token, uint256 _amount) internal returns (uint256) { - uint256 balanceBefore = _token.balanceOf(address(this)); - // slither-disable-next-line arbitrary-send-erc20 - _token.safeTransferFrom(_from, address(this), _amount); - uint256 balanceAfter = _token.balanceOf(address(this)); - - return balanceAfter - balanceBefore; - } - - /// @notice Initiates a deposit transaction within Bridgehub, used by `requestL2TransactionTwoBridges`. 
- function bridgehubDeposit( - uint256 _chainId, - address _prevMsgSender, - // solhint-disable-next-line no-unused-vars - uint256 _l2Value, - bytes calldata _data - ) - external - payable - override - onlyBridgehub - whenNotPaused - returns (L2TransactionRequestTwoBridgesInner memory request) - { - require(l2BridgeAddress[_chainId] != address(0), "ShB l2 bridge not deployed"); - - (address _l1Token, uint256 _depositAmount, address _l2Receiver) = abi.decode( - _data, - (address, uint256, address) - ); - require(_l1Token != L1_WETH_TOKEN, "ShB: WETH deposit not supported"); - require(BRIDGE_HUB.baseToken(_chainId) != _l1Token, "ShB: baseToken deposit not supported"); - - uint256 amount; - if (_l1Token == ETH_TOKEN_ADDRESS) { - amount = msg.value; - require(_depositAmount == 0, "ShB wrong withdraw amount"); - } else { - require(msg.value == 0, "ShB m.v > 0 for BH d.it 2"); - amount = _depositAmount; - - uint256 withdrawAmount = _depositFunds(_prevMsgSender, IERC20(_l1Token), _depositAmount); - require(withdrawAmount == _depositAmount, "5T"); // The token has non-standard transfer logic - } - require(amount != 0, "6T"); // empty deposit amount - - bytes32 txDataHash = keccak256(abi.encode(_prevMsgSender, _l1Token, amount)); - if (!hyperbridgingEnabled[_chainId]) { - chainBalance[_chainId][_l1Token] += amount; - } - - { - // Request the finalization of the deposit on the L2 side - bytes memory l2TxCalldata = _getDepositL2Calldata(_prevMsgSender, _l2Receiver, _l1Token, amount); - - request = L2TransactionRequestTwoBridgesInner({ - magicValue: TWO_BRIDGES_MAGIC_VALUE, - l2Contract: l2BridgeAddress[_chainId], - l2Calldata: l2TxCalldata, - factoryDeps: new bytes[](0), - txDataHash: txDataHash - }); - } - emit BridgehubDepositInitiated({ - chainId: _chainId, - txDataHash: txDataHash, - from: _prevMsgSender, - to: _l2Receiver, - l1Token: _l1Token, - amount: amount - }); - } - - /// @notice Confirms the acceptance of a transaction by the Mailbox, as part of the L2 transaction 
process within Bridgehub. - /// This function is utilized by `requestL2TransactionTwoBridges` to validate the execution of a transaction. - function bridgehubConfirmL2Transaction( - uint256 _chainId, - bytes32 _txDataHash, - bytes32 _txHash - ) external override onlyBridgehub whenNotPaused { - require(depositHappened[_chainId][_txHash] == 0x00, "ShB tx hap"); - depositHappened[_chainId][_txHash] = _txDataHash; - emit BridgehubDepositFinalized(_chainId, _txDataHash, _txHash); - } - - /// @dev Sets the L1ERC20Bridge contract address. Should be called only once. - function setL1Erc20Bridge(address _legacyBridge) external onlyOwner { - require(address(legacyBridge) == address(0), "ShB: legacy bridge already set"); - require(_legacyBridge != address(0), "ShB: legacy bridge 0"); - legacyBridge = IL1ERC20Bridge(_legacyBridge); - } - - /// @dev Generate a calldata for calling the deposit finalization on the L2 bridge contract - function _getDepositL2Calldata( - address _l1Sender, - address _l2Receiver, - address _l1Token, - uint256 _amount - ) internal view returns (bytes memory) { - bytes memory gettersData = _getERC20Getters(_l1Token); - return abi.encodeCall(IL2Bridge.finalizeDeposit, (_l1Sender, _l2Receiver, _l1Token, _amount, gettersData)); - } - - /// @dev Receives and parses (name, symbol, decimals) from the token contract - function _getERC20Getters(address _token) internal view returns (bytes memory) { - if (_token == ETH_TOKEN_ADDRESS) { - bytes memory name = bytes("Ether"); - bytes memory symbol = bytes("ETH"); - bytes memory decimals = abi.encode(uint8(18)); - return abi.encode(name, symbol, decimals); // when depositing eth to a non-eth based chain it is an ERC20 - } - - (, bytes memory data1) = _token.staticcall(abi.encodeCall(IERC20Metadata.name, ())); - (, bytes memory data2) = _token.staticcall(abi.encodeCall(IERC20Metadata.symbol, ())); - (, bytes memory data3) = _token.staticcall(abi.encodeCall(IERC20Metadata.decimals, ())); - return abi.encode(data1, 
data2, data3); - } - - /// @dev Withdraw funds from the initiated deposit, that failed when finalizing on L2 - /// @param _depositSender The address of the deposit initiator - /// @param _l1Token The address of the deposited L1 ERC20 token - /// @param _amount The amount of the deposit that failed. - /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization - /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed - /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent - /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization - function claimFailedDeposit( - uint256 _chainId, - address _depositSender, - address _l1Token, - uint256 _amount, - bytes32 _l2TxHash, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes32[] calldata _merkleProof - ) external override { - _claimFailedDeposit({ - _checkedInLegacyBridge: false, - _chainId: _chainId, - _depositSender: _depositSender, - _l1Token: _l1Token, - _amount: _amount, - _l2TxHash: _l2TxHash, - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _merkleProof: _merkleProof - }); - } - - /// @dev Processes claims of failed deposit, whether they originated from the legacy bridge or the current system. 
- function _claimFailedDeposit( - bool _checkedInLegacyBridge, - uint256 _chainId, - address _depositSender, - address _l1Token, - uint256 _amount, - bytes32 _l2TxHash, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes32[] calldata _merkleProof - ) internal nonReentrant whenNotPaused { - { - bool proofValid = BRIDGE_HUB.proveL1ToL2TransactionStatus({ - _chainId: _chainId, - _l2TxHash: _l2TxHash, - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _merkleProof: _merkleProof, - _status: TxStatus.Failure - }); - require(proofValid, "yn"); - } - require(_amount > 0, "y1"); - - { - bool notCheckedInLegacyBridgeOrWeCanCheckDeposit; - { - // Deposits that happened before the upgrade cannot be checked here, they have to be claimed and checked in the legacyBridge - bool weCanCheckDepositHere = !_isEraLegacyDeposit(_chainId, _l2BatchNumber, _l2TxNumberInBatch); - // Double claims are not possible, as depositHappened is checked here for all except legacy deposits (which have to happen through the legacy bridge) - // Funds claimed before the update will still be recorded in the legacy bridge - // Note we double check NEW deposits if they are called from the legacy bridge - notCheckedInLegacyBridgeOrWeCanCheckDeposit = (!_checkedInLegacyBridge) || weCanCheckDepositHere; - } - if (notCheckedInLegacyBridgeOrWeCanCheckDeposit) { - bytes32 dataHash = depositHappened[_chainId][_l2TxHash]; - bytes32 txDataHash = keccak256(abi.encode(_depositSender, _l1Token, _amount)); - require(dataHash == txDataHash, "ShB: d.it not hap"); - delete depositHappened[_chainId][_l2TxHash]; - } - } - - if (!hyperbridgingEnabled[_chainId]) { - // check that the chain has sufficient balance - require(chainBalance[_chainId][_l1Token] >= _amount, "ShB n funds"); - chainBalance[_chainId][_l1Token] -= _amount; - } - - // Withdraw funds - if (_l1Token == ETH_TOKEN_ADDRESS) { - bool callSuccess; - // Low-level 
assembly call, to avoid any memory copying (save gas) - assembly { - callSuccess := call(gas(), _depositSender, _amount, 0, 0, 0, 0) - } - require(callSuccess, "ShB: claimFailedDeposit failed"); - } else { - IERC20(_l1Token).safeTransfer(_depositSender, _amount); - // Note we don't allow weth deposits anymore, but there might be legacy weth deposits. - // until we add Weth bridging capabilities, we don't wrap/unwrap weth to ether. - } - - emit ClaimedFailedDepositSharedBridge(_chainId, _depositSender, _l1Token, _amount); - } - - /// @dev Determines if an eth withdrawal was initiated on zkSync Era before the upgrade to the Shared Bridge. - /// @param _chainId The chain ID of the transaction to check. - /// @param _l2BatchNumber The L2 batch number for the withdrawal. - /// @return Whether withdrawal was initiated on zkSync Era before diamond proxy upgrade. - function _isEraLegacyEthWithdrawal(uint256 _chainId, uint256 _l2BatchNumber) internal view returns (bool) { - require((_chainId != ERA_CHAIN_ID) || eraPostDiamondUpgradeFirstBatch != 0, "ShB: diamondUFB not set for Era"); - return (_chainId == ERA_CHAIN_ID) && (_l2BatchNumber < eraPostDiamondUpgradeFirstBatch); - } - - /// @dev Determines if a token withdrawal was initiated on zkSync Era before the upgrade to the Shared Bridge. - /// @param _chainId The chain ID of the transaction to check. - /// @param _l2BatchNumber The L2 batch number for the withdrawal. - /// @return Whether withdrawal was initiated on zkSync Era before Legacy Bridge upgrade. - function _isEraLegacyTokenWithdrawal(uint256 _chainId, uint256 _l2BatchNumber) internal view returns (bool) { - require( - (_chainId != ERA_CHAIN_ID) || eraPostLegacyBridgeUpgradeFirstBatch != 0, - "ShB: LegacyUFB not set for Era" - ); - return (_chainId == ERA_CHAIN_ID) && (_l2BatchNumber < eraPostLegacyBridgeUpgradeFirstBatch); - } - - /// @dev Determines if a deposit was initiated on zkSync Era before the upgrade to the Shared Bridge. 
- /// @param _chainId The chain ID of the transaction to check. - /// @param _l2BatchNumber The L2 batch number for the deposit where it was processed. - /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the deposit was processed. - /// @return Whether deposit was initiated on zkSync Era before Shared Bridge upgrade. - function _isEraLegacyDeposit( - uint256 _chainId, - uint256 _l2BatchNumber, - uint256 _l2TxNumberInBatch - ) internal view returns (bool) { - require( - (_chainId != ERA_CHAIN_ID) || (eraLegacyBridgeLastDepositBatch != 0), - "ShB: last deposit time not set for Era" - ); - return - (_chainId == ERA_CHAIN_ID) && - (_l2BatchNumber < eraLegacyBridgeLastDepositBatch || - (_l2TxNumberInBatch < eraLegacyBridgeLastDepositTxNumber && - _l2BatchNumber == eraLegacyBridgeLastDepositBatch)); - } - - /// @notice Finalize the withdrawal and release funds - /// @param _chainId The chain ID of the transaction to check - /// @param _l2BatchNumber The L2 batch number where the withdrawal was processed - /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent - /// @param _message The L2 withdraw data, stored in an L2 -> L1 message - /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization - function finalizeWithdrawal( - uint256 _chainId, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external override { - // To avoid rewithdrawing txs that have already happened on the legacy bridge. - // Note: new withdraws are all recorded here, so double withdrawing them is not possible. 
- if (_isEraLegacyTokenWithdrawal(_chainId, _l2BatchNumber)) { - require(!legacyBridge.isWithdrawalFinalized(_l2BatchNumber, _l2MessageIndex), "ShB: legacy withdrawal"); - } - _finalizeWithdrawal({ - _chainId: _chainId, - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _message: _message, - _merkleProof: _merkleProof - }); - } - - struct MessageParams { - uint256 l2BatchNumber; - uint256 l2MessageIndex; - uint16 l2TxNumberInBatch; - } - - /// @dev Internal function that handles the logic for finalizing withdrawals, - /// serving both the current bridge system and the legacy ERC20 bridge. - function _finalizeWithdrawal( - uint256 _chainId, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) internal nonReentrant whenNotPaused returns (address l1Receiver, address l1Token, uint256 amount) { - require(!isWithdrawalFinalized[_chainId][_l2BatchNumber][_l2MessageIndex], "Withdrawal is already finalized"); - isWithdrawalFinalized[_chainId][_l2BatchNumber][_l2MessageIndex] = true; - - // Handling special case for withdrawal from zkSync Era initiated before Shared Bridge. - if (_isEraLegacyEthWithdrawal(_chainId, _l2BatchNumber)) { - // Checks that the withdrawal wasn't finalized already. 
- bool alreadyFinalized = IGetters(ERA_DIAMOND_PROXY).isEthWithdrawalFinalized( - _l2BatchNumber, - _l2MessageIndex - ); - require(!alreadyFinalized, "Withdrawal is already finalized 2"); - } - - MessageParams memory messageParams = MessageParams({ - l2BatchNumber: _l2BatchNumber, - l2MessageIndex: _l2MessageIndex, - l2TxNumberInBatch: _l2TxNumberInBatch - }); - (l1Receiver, l1Token, amount) = _checkWithdrawal(_chainId, messageParams, _message, _merkleProof); - - if (!hyperbridgingEnabled[_chainId]) { - // Check that the chain has sufficient balance - require(chainBalance[_chainId][l1Token] >= amount, "ShB not enough funds 2"); // not enough funds - chainBalance[_chainId][l1Token] -= amount; - } - - if (l1Token == ETH_TOKEN_ADDRESS) { - bool callSuccess; - // Low-level assembly call, to avoid any memory copying (save gas) - assembly { - callSuccess := call(gas(), l1Receiver, amount, 0, 0, 0, 0) - } - require(callSuccess, "ShB: withdraw failed"); - } else { - // Withdraw funds - IERC20(l1Token).safeTransfer(l1Receiver, amount); - } - emit WithdrawalFinalizedSharedBridge(_chainId, l1Receiver, l1Token, amount); - } - - /// @dev Verifies the validity of a withdrawal message from L2 and returns details of the withdrawal. - function _checkWithdrawal( - uint256 _chainId, - MessageParams memory _messageParams, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) internal view returns (address l1Receiver, address l1Token, uint256 amount) { - (l1Receiver, l1Token, amount) = _parseL2WithdrawalMessage(_chainId, _message); - L2Message memory l2ToL1Message; - { - bool baseTokenWithdrawal = (l1Token == BRIDGE_HUB.baseToken(_chainId)); - address l2Sender = baseTokenWithdrawal ? 
L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR : l2BridgeAddress[_chainId]; - - l2ToL1Message = L2Message({ - txNumberInBatch: _messageParams.l2TxNumberInBatch, - sender: l2Sender, - data: _message - }); - } - - bool success = BRIDGE_HUB.proveL2MessageInclusion({ - _chainId: _chainId, - _batchNumber: _messageParams.l2BatchNumber, - _index: _messageParams.l2MessageIndex, - _message: l2ToL1Message, - _proof: _merkleProof - }); - require(success, "ShB withd w proof"); // withdrawal wrong proof - } - - function _parseL2WithdrawalMessage( - uint256 _chainId, - bytes memory _l2ToL1message - ) internal view returns (address l1Receiver, address l1Token, uint256 amount) { - // We check that the message is long enough to read the data. - // Please note that there are two versions of the message: - // 1. The message that is sent by `withdraw(address _l1Receiver)` - // It should be equal to the length of the bytes4 function signature + address l1Receiver + uint256 amount = 4 + 20 + 32 = 56 (bytes). - // 2. The message that is sent by `withdrawWithMessage(address _l1Receiver, bytes calldata _additionalData)` - // It should be equal to the length of the following: - // bytes4 function signature + address l1Receiver + uint256 amount + address l2Sender + bytes _additionalData = - // = 4 + 20 + 32 + 32 + _additionalData.length >= 68 (bytes). - - // So the data is expected to be at least 56 bytes long. 
- require(_l2ToL1message.length >= 56, "ShB wrong msg len"); // wrong message length - - (uint32 functionSignature, uint256 offset) = UnsafeBytes.readUint32(_l2ToL1message, 0); - if (bytes4(functionSignature) == IMailbox.finalizeEthWithdrawal.selector) { - // this message is a base token withdrawal - (l1Receiver, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset); - (amount, offset) = UnsafeBytes.readUint256(_l2ToL1message, offset); - l1Token = BRIDGE_HUB.baseToken(_chainId); - } else if (bytes4(functionSignature) == IL1ERC20Bridge.finalizeWithdrawal.selector) { - // We use the IL1ERC20Bridge for backward compatibility with old withdrawals. - - // this message is a token withdrawal - - // Check that the message length is correct. - // It should be equal to the length of the function signature + address + address + uint256 = 4 + 20 + 20 + 32 = - // 76 (bytes). - require(_l2ToL1message.length == 76, "ShB wrong msg len 2"); - (l1Receiver, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset); - (l1Token, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset); - (amount, offset) = UnsafeBytes.readUint256(_l2ToL1message, offset); - } else { - revert("ShB Incorrect message function selector"); - } - } - - /*////////////////////////////////////////////////////////////// - ERA LEGACY FUNCTIONS - //////////////////////////////////////////////////////////////*/ - - /// @notice Initiates a deposit by locking funds on the contract and sending the request - /// of processing an L2 transaction where tokens would be minted. - /// @dev If the token is bridged for the first time, the L2 token contract will be deployed. Note however, that the - /// newly-deployed token does not support any custom logic, i.e. rebase tokens' functionality is not supported. 
- /// @param _l2Receiver The account address that should receive funds on L2 - /// @param _l1Token The L1 token address which is deposited - /// @param _amount The total amount of tokens to be bridged - /// @param _l2TxGasLimit The L2 gas limit to be used in the corresponding L2 transaction - /// @param _l2TxGasPerPubdataByte The gasPerPubdataByteLimit to be used in the corresponding L2 transaction - /// @param _refundRecipient The address on L2 that will receive the refund for the transaction. - /// @dev If the L2 deposit finalization transaction fails, the `_refundRecipient` will receive the `_l2Value`. - /// Please note, the contract may change the refund recipient's address to eliminate sending funds to addresses - /// out of control. - /// - If `_refundRecipient` is a contract on L1, the refund will be sent to the aliased `_refundRecipient`. - /// - If `_refundRecipient` is set to `address(0)` and the sender has NO deployed bytecode on L1, the refund will - /// be sent to the `msg.sender` address. - /// - If `_refundRecipient` is set to `address(0)` and the sender has deployed bytecode on L1, the refund will be - /// sent to the aliased `msg.sender` address. - /// @dev The address aliasing of L1 contracts as refund recipient on L2 is necessary to guarantee that the funds - /// are controllable through the Mailbox, since the Mailbox applies address aliasing to the from address for the - /// L2 tx if the L1 msg.sender is a contract. Without address aliasing for L1 contracts as refund recipients they - /// would not be able to make proper L2 tx requests through the Mailbox to use or withdraw the funds from L2, and - /// the funds would be lost. - /// @return l2TxHash The L2 transaction hash of deposit finalization. 
- function depositLegacyErc20Bridge( - address _prevMsgSender, - address _l2Receiver, - address _l1Token, - uint256 _amount, - uint256 _l2TxGasLimit, - uint256 _l2TxGasPerPubdataByte, - address _refundRecipient - ) external payable override onlyLegacyBridge nonReentrant whenNotPaused returns (bytes32 l2TxHash) { - require(l2BridgeAddress[ERA_CHAIN_ID] != address(0), "ShB b. n dep"); - require(_l1Token != L1_WETH_TOKEN, "ShB: WETH deposit not supported 2"); - - // Note that funds have been transferred to this contract in the legacy ERC20 bridge. - if (!hyperbridgingEnabled[ERA_CHAIN_ID]) { - chainBalance[ERA_CHAIN_ID][_l1Token] += _amount; - } - - bytes memory l2TxCalldata = _getDepositL2Calldata(_prevMsgSender, _l2Receiver, _l1Token, _amount); - - { - // If the refund recipient is not specified, the refund will be sent to the sender of the transaction. - // Otherwise, the refund will be sent to the specified address. - // If the recipient is a contract on L1, the address alias will be applied. 
- address refundRecipient = AddressAliasHelper.actualRefundRecipient(_refundRecipient, _prevMsgSender); - - L2TransactionRequestDirect memory request = L2TransactionRequestDirect({ - chainId: ERA_CHAIN_ID, - l2Contract: l2BridgeAddress[ERA_CHAIN_ID], - mintValue: msg.value, // l2 gas + l2 msg.Value the bridgehub will withdraw the mintValue from the base token bridge for gas - l2Value: 0, // L2 msg.value, this contract doesn't support base token deposits or wrapping functionality, for direct deposits use bridgehub - l2Calldata: l2TxCalldata, - l2GasLimit: _l2TxGasLimit, - l2GasPerPubdataByteLimit: _l2TxGasPerPubdataByte, - factoryDeps: new bytes[](0), - refundRecipient: refundRecipient - }); - l2TxHash = BRIDGE_HUB.requestL2TransactionDirect{value: msg.value}(request); - } - - bytes32 txDataHash = keccak256(abi.encode(_prevMsgSender, _l1Token, _amount)); - // Save the deposited amount to claim funds on L1 if the deposit failed on L2 - depositHappened[ERA_CHAIN_ID][l2TxHash] = txDataHash; - - emit LegacyDepositInitiated({ - chainId: ERA_CHAIN_ID, - l2DepositTxHash: l2TxHash, - from: _prevMsgSender, - to: _l2Receiver, - l1Token: _l1Token, - amount: _amount - }); - } - - /// @notice Finalizes the withdrawal for transactions initiated via the legacy ERC20 bridge. 
- /// @param _l2BatchNumber The L2 batch number where the withdrawal was processed - /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent - /// @param _message The L2 withdraw data, stored in an L2 -> L1 message - /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization - /// - /// @return l1Receiver The address on L1 that will receive the withdrawn funds - /// @return l1Token The address of the L1 token being withdrawn - /// @return amount The amount of the token being withdrawn - function finalizeWithdrawalLegacyErc20Bridge( - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external override onlyLegacyBridge returns (address l1Receiver, address l1Token, uint256 amount) { - (l1Receiver, l1Token, amount) = _finalizeWithdrawal({ - _chainId: ERA_CHAIN_ID, - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _message: _message, - _merkleProof: _merkleProof - }); - } - - /// @notice Withdraw funds from the initiated deposit, that failed when finalizing on zkSync Era chain. - /// This function is specifically designed for maintaining backward-compatibility with legacy `claimFailedDeposit` - /// method in `L1ERC20Bridge`. - /// - /// @param _depositSender The address of the deposit initiator - /// @param _l1Token The address of the deposited L1 ERC20 token - /// @param _amount The amount of the deposit that failed. 
- /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization - /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed - /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent - /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization - function claimFailedDepositLegacyErc20Bridge( - address _depositSender, - address _l1Token, - uint256 _amount, - bytes32 _l2TxHash, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes32[] calldata _merkleProof - ) external override onlyLegacyBridge { - _claimFailedDeposit({ - _checkedInLegacyBridge: true, - _chainId: ERA_CHAIN_ID, - _depositSender: _depositSender, - _l1Token: _l1Token, - _amount: _amount, - _l2TxHash: _l2TxHash, - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _merkleProof: _merkleProof - }); - } - - /*////////////////////////////////////////////////////////////// - PAUSE - //////////////////////////////////////////////////////////////*/ - - /// @notice Pauses all functions marked with the `whenNotPaused` modifier. - function pause() external onlyOwner { - _pause(); - } - - /// @notice Unpauses the contract, allowing all functions marked with the `whenNotPaused` modifier to be called again. 
- function unpause() external onlyOwner { - _unpause(); - } -} diff --git a/l2-contracts/contracts/bridge/L2SharedBridge.sol b/l1-contracts/contracts/bridge/L2SharedBridgeLegacy.sol similarity index 52% rename from l2-contracts/contracts/bridge/L2SharedBridge.sol rename to l1-contracts/contracts/bridge/L2SharedBridgeLegacy.sol index 4c33b4b12..4ae901593 100644 --- a/l2-contracts/contracts/bridge/L2SharedBridge.sol +++ b/l1-contracts/contracts/bridge/L2SharedBridgeLegacy.sol @@ -1,25 +1,30 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; -import {Initializable} from "@openzeppelin/contracts/proxy/utils/Initializable.sol"; -import {BeaconProxy} from "@openzeppelin/contracts/proxy/beacon/BeaconProxy.sol"; -import {UpgradeableBeacon} from "@openzeppelin/contracts/proxy/beacon/UpgradeableBeacon.sol"; +import {Initializable} from "@openzeppelin/contracts-v4/proxy/utils/Initializable.sol"; +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; -import {IL1ERC20Bridge} from "./interfaces/IL1ERC20Bridge.sol"; -import {IL2SharedBridge} from "./interfaces/IL2SharedBridge.sol"; -import {IL2StandardToken} from "./interfaces/IL2StandardToken.sol"; +import {BridgedStandardERC20} from "./BridgedStandardERC20.sol"; -import {L2StandardERC20} from "./L2StandardERC20.sol"; +import {DEPLOYER_SYSTEM_CONTRACT, L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR} from "../common/L2ContractAddresses.sol"; +import {SystemContractsCaller} from "../common/libraries/SystemContractsCaller.sol"; +import {L2ContractHelper, IContractDeployer} from "../common/libraries/L2ContractHelper.sol"; import {AddressAliasHelper} from "../vendor/AddressAliasHelper.sol"; -import {L2ContractHelper, DEPLOYER_SYSTEM_CONTRACT, IContractDeployer} from "../L2ContractHelper.sol"; -import {SystemContractsCaller} from "../SystemContractsCaller.sol"; + +import {IL2AssetRouter} from "./asset-router/IL2AssetRouter.sol"; +import 
{IL2NativeTokenVault} from "./ntv/IL2NativeTokenVault.sol"; + +import {IL2SharedBridgeLegacy} from "./interfaces/IL2SharedBridgeLegacy.sol"; +import {InvalidCaller, ZeroAddress, EmptyBytes32, Unauthorized, AmountMustBeGreaterThanZero, DeployFailed} from "../common/L1ContractErrors.sol"; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev /// @notice The "default" bridge implementation for the ERC20 tokens. Note, that it does not /// support any custom token logic, i.e. rebase tokens' functionality is not supported. -contract L2SharedBridge is IL2SharedBridge, Initializable { +/// @dev Note, that this contract should be compatible with its previous version as it will be +/// the primary bridge to be used during migration. +contract L2SharedBridgeLegacy is IL2SharedBridgeLegacy, Initializable { /// @dev The address of the L1 shared bridge counterpart. address public override l1SharedBridge; @@ -35,46 +40,73 @@ contract L2SharedBridge is IL2SharedBridge, Initializable { /// @dev The address of the legacy L1 erc20 bridge counterpart. /// This is non-zero only on Era, and should not be renamed for backward compatibility with the SDKs. + // slither-disable-next-line uninitialized-state address public override l1Bridge; - /// @dev Contract is expected to be used as proxy implementation. - /// @dev Disable the initialization to prevent Parity hack. - uint256 immutable ERA_CHAIN_ID; + modifier onlyNTV() { + if (msg.sender != L2_NATIVE_TOKEN_VAULT_ADDR) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyAssetRouter() { + if (msg.sender != L2_ASSET_ROUTER_ADDR) { + revert Unauthorized(msg.sender); + } + _; + } - constructor(uint256 _eraChainId) { - ERA_CHAIN_ID = _eraChainId; + constructor() { _disableInitializers(); } /// @notice Initializes the bridge contract for later use. Expected to be used in the proxy. /// @param _l1SharedBridge The address of the L1 Bridge contract. 
- /// @param _l1Bridge The address of the legacy L1 Bridge contract. /// @param _l2TokenProxyBytecodeHash The bytecode hash of the proxy for tokens deployed by the bridge. /// @param _aliasedOwner The address of the governor contract. function initialize( address _l1SharedBridge, - address _l1Bridge, bytes32 _l2TokenProxyBytecodeHash, address _aliasedOwner ) external reinitializer(2) { - require(_l1SharedBridge != address(0), "bf"); - require(_l2TokenProxyBytecodeHash != bytes32(0), "df"); - require(_aliasedOwner != address(0), "sf"); + if (_l1SharedBridge == address(0)) { + revert ZeroAddress(); + } + + if (_l2TokenProxyBytecodeHash == bytes32(0)) { + revert EmptyBytes32(); + } + + if (_aliasedOwner == address(0)) { + revert ZeroAddress(); + } l1SharedBridge = _l1SharedBridge; - if (block.chainid != ERA_CHAIN_ID) { - address l2StandardToken = address(new L2StandardERC20{salt: bytes32(0)}()); + // The following statement is true only in freshly deployed environments. However, + // for those environments we do not need to deploy this contract at all. + // This check is primarily for local testing purposes. 
+ if (l2TokenProxyBytecodeHash == bytes32(0) && address(l2TokenBeacon) == address(0)) { + address l2StandardToken = address(new BridgedStandardERC20{salt: bytes32(0)}()); l2TokenBeacon = new UpgradeableBeacon{salt: bytes32(0)}(l2StandardToken); l2TokenProxyBytecodeHash = _l2TokenProxyBytecodeHash; l2TokenBeacon.transferOwnership(_aliasedOwner); - } else { - require(_l1Bridge != address(0), "bf2"); - l1Bridge = _l1Bridge; - // l2StandardToken and l2TokenBeacon are already deployed on ERA, and stored in the proxy } } + /// @notice Initiates a withdrawal by burning funds on the contract and sending the message to L1 + /// where tokens would be unlocked + /// @param _l1Receiver The account address that should receive funds on L1 + /// @param _l2Token The L2 token address which is withdrawn + /// @param _amount The total amount of tokens to be withdrawn + function withdraw(address _l1Receiver, address _l2Token, uint256 _amount) external override { + if (_amount == 0) { + revert AmountMustBeGreaterThanZero(); + } + IL2AssetRouter(L2_ASSET_ROUTER_ADDR).withdrawLegacyBridge(_l1Receiver, _l2Token, _amount, msg.sender); + } + /// @notice Finalize the deposit and mint funds /// @param _l1Sender The account address that initiated the deposit on L1 /// @param _l2Receiver The account address that would receive minted ether @@ -87,70 +119,45 @@ contract L2SharedBridge is IL2SharedBridge, Initializable { address _l1Token, uint256 _amount, bytes calldata _data - ) external override { + ) external { // Only the L1 bridge counterpart can initiate and finalize the deposit. 
- require( - AddressAliasHelper.undoL1ToL2Alias(msg.sender) == l1Bridge || - AddressAliasHelper.undoL1ToL2Alias(msg.sender) == l1SharedBridge, - "mq" - ); - - address expectedL2Token = l2TokenAddress(_l1Token); - address currentL1Token = l1TokenAddress[expectedL2Token]; - if (currentL1Token == address(0)) { - address deployedToken = _deployL2Token(_l1Token, _data); - require(deployedToken == expectedL2Token, "mt"); - l1TokenAddress[expectedL2Token] = _l1Token; - } else { - require(currentL1Token == _l1Token, "gg"); // Double check that the expected value equal to real one + if ( + AddressAliasHelper.undoL1ToL2Alias(msg.sender) != l1Bridge && + AddressAliasHelper.undoL1ToL2Alias(msg.sender) != l1SharedBridge + ) { + revert InvalidCaller(msg.sender); } - IL2StandardToken(expectedL2Token).bridgeMint(_l2Receiver, _amount); - emit FinalizeDeposit(_l1Sender, _l2Receiver, expectedL2Token, _amount); - } + IL2AssetRouter(L2_ASSET_ROUTER_ADDR).finalizeDepositLegacyBridge({ + _l1Sender: _l1Sender, + _l2Receiver: _l2Receiver, + _l1Token: _l1Token, + _amount: _amount, + _data: _data + }); - /// @dev Deploy and initialize the L2 token for the L1 counterpart - function _deployL2Token(address _l1Token, bytes calldata _data) internal returns (address) { - bytes32 salt = _getCreate2Salt(_l1Token); - - BeaconProxy l2Token = _deployBeaconProxy(salt); - L2StandardERC20(address(l2Token)).bridgeInitialize(_l1Token, _data); - - return address(l2Token); - } - - /// @notice Initiates a withdrawal by burning funds on the contract and sending the message to L1 - /// where tokens would be unlocked - /// @param _l1Receiver The account address that should receive funds on L1 - /// @param _l2Token The L2 token address which is withdrawn - /// @param _amount The total amount of tokens to be withdrawn - function withdraw(address _l1Receiver, address _l2Token, uint256 _amount) external override { - require(_amount > 0, "Amount cannot be zero"); - - IL2StandardToken(_l2Token).bridgeBurn(msg.sender, 
_amount); + address l2Token = IL2NativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).l2TokenAddress(_l1Token); - address l1Token = l1TokenAddress[_l2Token]; - require(l1Token != address(0), "yh"); - - bytes memory message = _getL1WithdrawMessage(_l1Receiver, l1Token, _amount); - L2ContractHelper.sendMessageToL1(message); - - emit WithdrawalInitiated(msg.sender, _l1Receiver, _l2Token, _amount); - } + if (l1TokenAddress[l2Token] == address(0)) { + l1TokenAddress[l2Token] = _l1Token; + } - /// @dev Encode the message for l2ToL1log sent with withdraw initialization - function _getL1WithdrawMessage( - address _to, - address _l1Token, - uint256 _amount - ) internal pure returns (bytes memory) { - // note we use the IL1ERC20Bridge.finalizeWithdrawal function selector to specify the selector for L1<>L2 messages, - // and we use this interface so that when the switch happened the old messages could be processed - return abi.encodePacked(IL1ERC20Bridge.finalizeWithdrawal.selector, _to, _l1Token, _amount); + emit FinalizeDeposit(_l1Sender, _l2Receiver, l2Token, _amount); } /// @return Address of an L2 token counterpart function l2TokenAddress(address _l1Token) public view override returns (address) { + address token = IL2NativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).l2TokenAddress(_l1Token); + if (token != address(0)) { + return token; + } + return _calculateCreate2TokenAddress(_l1Token); + } + + /// @notice Calculates L2 wrapped token address given the currently stored beacon proxy bytecode hash and beacon address. + /// @param _l1Token The address of token on L1. + /// @return Address of an L2 token counterpart. 
+ function _calculateCreate2TokenAddress(address _l1Token) internal view returns (address) { bytes32 constructorInputHash = keccak256(abi.encode(address(l2TokenBeacon), "")); bytes32 salt = _getCreate2Salt(_l1Token); return @@ -165,7 +172,7 @@ contract L2SharedBridge is IL2SharedBridge, Initializable { /// @dev Deploy the beacon proxy for the L2 token, while using ContractDeployer system contract. /// @dev This function uses raw call to ContractDeployer to make sure that exactly `l2TokenProxyBytecodeHash` is used /// for the code of the proxy. - function _deployBeaconProxy(bytes32 salt) internal returns (BeaconProxy proxy) { + function deployBeaconProxy(bytes32 salt) external onlyNTV returns (address proxy) { (bool success, bytes memory returndata) = SystemContractsCaller.systemCallWithReturndata( uint32(gasleft()), DEPLOYER_SYSTEM_CONTRACT, @@ -177,7 +184,14 @@ contract L2SharedBridge is IL2SharedBridge, Initializable { ); // The deployment should be successful and return the address of the proxy - require(success, "mk"); - proxy = BeaconProxy(abi.decode(returndata, (address))); + if (!success) { + revert DeployFailed(); + } + proxy = abi.decode(returndata, (address)); + } + + function sendMessageToL1(bytes calldata _message) external override onlyAssetRouter returns (bytes32) { + // slither-disable-next-line unused-return + return L2ContractHelper.sendMessageToL1(_message); } } diff --git a/l2-contracts/contracts/bridge/L2WrappedBaseToken.sol b/l1-contracts/contracts/bridge/L2WrappedBaseToken.sol similarity index 73% rename from l2-contracts/contracts/bridge/L2WrappedBaseToken.sol rename to l1-contracts/contracts/bridge/L2WrappedBaseToken.sol index 8cf4d7b5c..1c430cacd 100644 --- a/l2-contracts/contracts/bridge/L2WrappedBaseToken.sol +++ b/l1-contracts/contracts/bridge/L2WrappedBaseToken.sol @@ -1,11 +1,14 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; -import {ERC20PermitUpgradeable} from 
"@openzeppelin/contracts-upgradeable/token/ERC20/extensions/draft-ERC20PermitUpgradeable.sol"; +import {ERC20PermitUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/token/ERC20/extensions/draft-ERC20PermitUpgradeable.sol"; import {IL2WrappedBaseToken} from "./interfaces/IL2WrappedBaseToken.sol"; -import {IL2StandardToken} from "./interfaces/IL2StandardToken.sol"; +import {IBridgedStandardToken} from "./interfaces/IBridgedStandardToken.sol"; +import {L2_NATIVE_TOKEN_VAULT_ADDR} from "../common/L2ContractAddresses.sol"; + +import {ZeroAddress, Unauthorized, BridgeMintNotImplemented, WithdrawFailed} from "../common/L1ContractErrors.sol"; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev @@ -20,19 +23,37 @@ import {IL2StandardToken} from "./interfaces/IL2StandardToken.sol"; /// /// Note: This is an upgradeable contract. In the future, we will remove upgradeability to make it trustless. /// But for now, when the Rollup has instant upgradability, we leave the possibility of upgrading to improve the contract if needed. -contract L2WrappedBaseToken is ERC20PermitUpgradeable, IL2WrappedBaseToken, IL2StandardToken { +contract L2WrappedBaseToken is ERC20PermitUpgradeable, IL2WrappedBaseToken, IBridgedStandardToken { /// @dev Address of the L2 WETH Bridge. address public override l2Bridge; /// @dev Address of the L1 base token. It can be deposited to mint this L2 token. address public override l1Address; + /// @dev Address of the native token vault. + address public override nativeTokenVault; + + /// @dev The assetId of the base token. The wrapped token does not have its own assetId. + bytes32 public baseTokenAssetId; + + modifier onlyBridge() { + if (msg.sender != l2Bridge) { + revert Unauthorized(msg.sender); + } + _; + } + /// @dev Contract is expected to be used as proxy implementation. constructor() { // Disable initialization to prevent Parity hack. _disableInitializers(); } + /// @dev Fallback function to allow receiving Ether. 
+ receive() external payable { + depositTo(msg.sender); + } + /// @notice Initializes a contract token for later use. Expected to be used in the proxy. /// @notice This function is used to integrate the previously deployed WETH token with the bridge. /// @dev Sets up `name`/`symbol`/`decimals` getters. @@ -42,15 +63,26 @@ contract L2WrappedBaseToken is ERC20PermitUpgradeable, IL2WrappedBaseToken, IL2S /// @param _l1Address Address of the L1 token that can be deposited to mint this L2 WETH. /// Note: The decimals are hardcoded to 18, the same as on Ether. function initializeV2( - string memory name_, - string memory symbol_, + string calldata name_, + string calldata symbol_, address _l2Bridge, - address _l1Address + address _l1Address, + bytes32 _baseTokenAssetId ) external reinitializer(2) { - require(_l2Bridge != address(0), "L2 bridge address cannot be zero"); - require(_l1Address != address(0), "L1 WETH token address cannot be zero"); + if (_l2Bridge == address(0)) { + revert ZeroAddress(); + } + + if (_l1Address == address(0)) { + revert ZeroAddress(); + } + if (_baseTokenAssetId == bytes32(0)) { + revert ZeroAddress(); + } l2Bridge = _l2Bridge; l1Address = _l1Address; + nativeTokenVault = L2_NATIVE_TOKEN_VAULT_ADDR; + baseTokenAssetId = _baseTokenAssetId; // Set decoded values for name and symbol. __ERC20_init_unchained(name_, symbol_); @@ -61,17 +93,12 @@ contract L2WrappedBaseToken is ERC20PermitUpgradeable, IL2WrappedBaseToken, IL2S emit Initialize(name_, symbol_, 18); } - modifier onlyBridge() { - require(msg.sender == l2Bridge, "permission denied"); // Only L2 bridge can call this method - _; - } - /// @notice Function for minting tokens on L2, implemented only to be compatible with IL2StandardToken interface. /// Always reverts instead of minting anything! /// Note: Use `deposit`/`depositTo` methods instead. 
// solhint-disable-next-line no-unused-vars function bridgeMint(address _to, uint256 _amount) external override onlyBridge { - revert("bridgeMint is not implemented! Use deposit/depositTo methods instead."); + revert BridgeMintNotImplemented(); } /// @dev Burn tokens from a given account and send the same amount of Ether to the bridge. @@ -82,7 +109,9 @@ contract L2WrappedBaseToken is ERC20PermitUpgradeable, IL2WrappedBaseToken, IL2S _burn(_from, _amount); // sends Ether to the bridge (bool success, ) = msg.sender.call{value: _amount}(""); - require(success, "Failed withdrawal"); + if (!success) { + revert WithdrawFailed(); + } emit BridgeBurn(_from, _amount); } @@ -107,11 +136,16 @@ contract L2WrappedBaseToken is ERC20PermitUpgradeable, IL2WrappedBaseToken, IL2S function withdrawTo(address _to, uint256 _amount) public override { _burn(msg.sender, _amount); (bool success, ) = _to.call{value: _amount}(""); - require(success, "Failed withdrawal"); + if (!success) { + revert WithdrawFailed(); + } } - /// @dev Fallback function to allow receiving Ether. 
- receive() external payable { - depositTo(msg.sender); + function originToken() external view override returns (address) { + return l1Address; + } + + function assetId() external view override returns (bytes32) { + return baseTokenAssetId; } } diff --git a/l1-contracts/contracts/bridge/asset-router/AssetRouterBase.sol b/l1-contracts/contracts/bridge/asset-router/AssetRouterBase.sol new file mode 100644 index 000000000..d6ca41bdf --- /dev/null +++ b/l1-contracts/contracts/bridge/asset-router/AssetRouterBase.sol @@ -0,0 +1,171 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/security/PausableUpgradeable.sol"; + +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; +import {SafeERC20} from "@openzeppelin/contracts-v4/token/ERC20/utils/SafeERC20.sol"; + +import {IAssetRouterBase} from "./IAssetRouterBase.sol"; +import {IAssetHandler} from "../interfaces/IAssetHandler.sol"; +import {DataEncoding} from "../../common/libraries/DataEncoding.sol"; + +import {L2_NATIVE_TOKEN_VAULT_ADDR} from "../../common/L2ContractAddresses.sol"; + +import {IBridgehub} from "../../bridgehub/IBridgehub.sol"; +import {Unauthorized, AssetHandlerDoesNotExist} from "../../common/L1ContractErrors.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev Bridges assets between L1 and ZK chain, supporting both ETH and ERC20 tokens. +/// @dev Designed for use with a proxy for upgradability. +abstract contract AssetRouterBase is IAssetRouterBase, Ownable2StepUpgradeable, PausableUpgradeable { + using SafeERC20 for IERC20; + + /// @dev Bridgehub smart contract that is used to operate with L2 via asynchronous L2 <-> L1 communication. 
+ IBridgehub public immutable override BRIDGE_HUB; + + /// @dev Chain ID of L1 for bridging reasons + uint256 public immutable L1_CHAIN_ID; + + /// @dev Chain ID of Era for legacy reasons + uint256 public immutable ERA_CHAIN_ID; + + /// @dev Maps asset ID to address of corresponding asset handler. + /// @dev Tracks the address of Asset Handler contracts, where bridged funds are locked for each asset. + /// @dev P.S. this liquidity was locked directly in SharedBridge before. + /// @dev Current AssetHandlers: NTV for tokens, Bridgehub for chains. + mapping(bytes32 assetId => address assetHandlerAddress) public assetHandlerAddress; + + /// @dev Maps asset ID to the asset deployment tracker address. + /// @dev Tracks the address of Deployment Tracker contract on L1, which sets Asset Handlers on L2s (ZK chain). + /// @dev For the asset and stores respective addresses. + /// @dev Current AssetDeploymentTrackers: NTV for tokens, CTMDeploymentTracker for chains. + mapping(bytes32 assetId => address assetDeploymentTracker) public assetDeploymentTracker; + + /** + * @dev This empty reserved space is put in place to allow future versions to add new + * variables without shifting down storage in the inheritance chain. + * See https://docs.openzeppelin.com/contracts/4.x/upgradeable#storage_gaps + */ + uint256[47] private __gap; + + /// @notice Checks that the message sender is the bridgehub. + modifier onlyBridgehub() { + if (msg.sender != address(BRIDGE_HUB)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @dev Contract is expected to be used as proxy implementation. + /// @dev Initialize the implementation to prevent Parity hack. 
+ constructor(uint256 _l1ChainId, uint256 _eraChainId, IBridgehub _bridgehub) { + L1_CHAIN_ID = _l1ChainId; + ERA_CHAIN_ID = _eraChainId; + BRIDGE_HUB = _bridgehub; + } + + /// @inheritdoc IAssetRouterBase + function setAssetHandlerAddressThisChain( + bytes32 _assetRegistrationData, + address _assetHandlerAddress + ) external virtual override; + + function _setAssetHandlerAddressThisChain( + address _nativeTokenVault, + bytes32 _assetRegistrationData, + address _assetHandlerAddress + ) internal { + bool senderIsNTV = msg.sender == address(_nativeTokenVault); + address sender = senderIsNTV ? L2_NATIVE_TOKEN_VAULT_ADDR : msg.sender; + bytes32 assetId = DataEncoding.encodeAssetId(block.chainid, _assetRegistrationData, sender); + if (!senderIsNTV && msg.sender != assetDeploymentTracker[assetId]) { + revert Unauthorized(msg.sender); + } + assetHandlerAddress[assetId] = _assetHandlerAddress; + assetDeploymentTracker[assetId] = msg.sender; + emit AssetHandlerRegisteredInitial(assetId, _assetHandlerAddress, _assetRegistrationData, sender); + } + + /*////////////////////////////////////////////////////////////// + Receive transaction Functions + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc IAssetRouterBase + function finalizeDeposit(uint256 _chainId, bytes32 _assetId, bytes calldata _transferData) public virtual; + + function _finalizeDeposit( + uint256 _chainId, + bytes32 _assetId, + bytes calldata _transferData, + address _nativeTokenVault + ) internal { + address assetHandler = assetHandlerAddress[_assetId]; + + if (assetHandler != address(0)) { + IAssetHandler(assetHandler).bridgeMint(_chainId, _assetId, _transferData); + } else { + assetHandlerAddress[_assetId] = _nativeTokenVault; + IAssetHandler(_nativeTokenVault).bridgeMint(_chainId, _assetId, _transferData); // ToDo: Maybe it's better to receive amount and receiver here? 
transferData may have different encoding + } + } + + /*////////////////////////////////////////////////////////////// + Internal Functions + //////////////////////////////////////////////////////////////*/ + + /// @dev send the burn message to the asset + /// @notice Forwards the burn request for specific asset to respective asset handler. + /// @param _chainId The chain ID of the ZK chain to which to deposit. + /// @param _nextMsgValue The L2 `msg.value` from the L1 -> L2 deposit transaction. + /// @param _assetId The deposited asset ID. + /// @param _originalCaller The `msg.sender` address from the external call that initiated current one. + /// @param _transferData The encoded data, which is used by the asset handler to determine L2 recipient and amount. Might include extra information. + /// @param _passValue Boolean indicating whether to pass msg.value in the call. + /// @return bridgeMintCalldata The calldata used by remote asset handler to mint tokens for recipient. + function _burn( + uint256 _chainId, + uint256 _nextMsgValue, + bytes32 _assetId, + address _originalCaller, + bytes memory _transferData, + bool _passValue + ) internal returns (bytes memory bridgeMintCalldata) { + address l1AssetHandler = assetHandlerAddress[_assetId]; + if (l1AssetHandler == address(0)) { + revert AssetHandlerDoesNotExist(_assetId); + } + + uint256 msgValue = _passValue ? msg.value : 0; + bridgeMintCalldata = IAssetHandler(l1AssetHandler).bridgeBurn{value: msgValue}({ + _chainId: _chainId, + _msgValue: _nextMsgValue, + _assetId: _assetId, + _originalCaller: _originalCaller, + _data: _transferData + }); + } + + /// @notice Ensures that token is registered with native token vault. + /// @dev Only used when deposit is made with legacy data encoding format. + /// @param _token The native token address which should be registered with native token vault. + /// @return assetId The asset ID of the token provided. 
+ function _ensureTokenRegisteredWithNTV(address _token) internal virtual returns (bytes32 assetId); + + /*////////////////////////////////////////////////////////////// + PAUSE + //////////////////////////////////////////////////////////////*/ + + /// @notice Pauses all functions marked with the `whenNotPaused` modifier. + function pause() external onlyOwner { + _pause(); + } + + /// @notice Unpauses the contract, allowing all functions marked with the `whenNotPaused` modifier to be called again. + function unpause() external onlyOwner { + _unpause(); + } +} diff --git a/l1-contracts/contracts/bridge/asset-router/IAssetRouterBase.sol b/l1-contracts/contracts/bridge/asset-router/IAssetRouterBase.sol new file mode 100644 index 000000000..a307ba526 --- /dev/null +++ b/l1-contracts/contracts/bridge/asset-router/IAssetRouterBase.sol @@ -0,0 +1,73 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IBridgehub} from "../../bridgehub/IBridgehub.sol"; + +/// @dev The encoding version used for legacy txs. +bytes1 constant LEGACY_ENCODING_VERSION = 0x00; + +/// @dev The encoding version used for new txs. +bytes1 constant NEW_ENCODING_VERSION = 0x01; + +/// @dev The encoding version used for txs that set the asset handler on the counterpart contract. +bytes1 constant SET_ASSET_HANDLER_COUNTERPART_ENCODING_VERSION = 0x02; + +/// @title L1 Bridge contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IAssetRouterBase { + event BridgehubDepositBaseTokenInitiated( + uint256 indexed chainId, + address indexed from, + bytes32 assetId, + uint256 amount + ); + + event BridgehubDepositInitiated( + uint256 indexed chainId, + bytes32 indexed txDataHash, + address indexed from, + bytes32 assetId, + bytes bridgeMintCalldata + ); + + event BridgehubWithdrawalInitiated( + uint256 chainId, + address indexed sender, + bytes32 indexed assetId, + bytes32 assetDataHash // Todo: What's the point of emitting hash? 
+ ); + + event AssetHandlerRegisteredInitial( + bytes32 indexed assetId, + address indexed assetHandlerAddress, + bytes32 indexed additionalData, + address assetDeploymentTracker + ); + + event AssetHandlerRegistered(bytes32 indexed assetId, address indexed _assetAddress); + + event DepositFinalizedAssetRouter(uint256 indexed chainId, bytes32 indexed assetId, bytes assetData); + + function BRIDGE_HUB() external view returns (IBridgehub); + + /// @notice Sets the asset handler address for a specified asset ID on the chain of the asset deployment tracker. + /// @dev The caller of this function is encoded within the `assetId`, therefore, it should be invoked by the asset deployment tracker contract. + /// @dev No access control on the caller, as msg.sender is encoded in the assetId. + /// @dev Typically, for most tokens, ADT is the native token vault. However, custom tokens may have their own specific asset deployment trackers. + /// @dev `setAssetHandlerAddressOnCounterpart` should be called on L1 to set asset handlers on L2 chains for a specific asset ID. + /// @param _assetRegistrationData The asset data which may include the asset address and any additional required data or encodings. + /// @param _assetHandlerAddress The address of the asset handler to be set for the provided asset. + function setAssetHandlerAddressThisChain(bytes32 _assetRegistrationData, address _assetHandlerAddress) external; + + function assetHandlerAddress(bytes32 _assetId) external view returns (address); + + /// @notice Finalize the withdrawal and release funds. + /// @param _chainId The chain ID of the transaction to check. + /// @param _assetId The bridged asset ID. + /// @param _transferData The position in the L2 logs Merkle tree of the l2Log that was sent with the message. + /// @dev We have both the legacy finalizeWithdrawal and the new finalizeDeposit functions, + /// finalizeDeposit uses the new format. On the L2 we have finalizeDeposit with new and old formats both. 
+ function finalizeDeposit(uint256 _chainId, bytes32 _assetId, bytes memory _transferData) external; +} diff --git a/l1-contracts/contracts/bridge/asset-router/IL1AssetRouter.sol b/l1-contracts/contracts/bridge/asset-router/IL1AssetRouter.sol new file mode 100644 index 000000000..5d4c4fc5f --- /dev/null +++ b/l1-contracts/contracts/bridge/asset-router/IL1AssetRouter.sol @@ -0,0 +1,201 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; + +import {IL1Nullifier} from "../interfaces/IL1Nullifier.sol"; +import {INativeTokenVault} from "../ntv/INativeTokenVault.sol"; +import {IAssetRouterBase} from "./IAssetRouterBase.sol"; +import {L2TransactionRequestTwoBridgesInner} from "../../bridgehub/IBridgehub.sol"; +import {IL1SharedBridgeLegacy} from "../interfaces/IL1SharedBridgeLegacy.sol"; + +/// @title L1 Bridge contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IL1AssetRouter is IAssetRouterBase, IL1SharedBridgeLegacy { + event BridgehubMintData(bytes bridgeMintData); + + event BridgehubDepositFinalized( + uint256 indexed chainId, + bytes32 indexed txDataHash, + bytes32 indexed l2DepositTxHash + ); + + event ClaimedFailedDepositAssetRouter(uint256 indexed chainId, bytes32 indexed assetId, bytes assetData); + + event AssetDeploymentTrackerSet( + bytes32 indexed assetId, + address indexed assetDeploymentTracker, + bytes32 indexed additionalData + ); + + event LegacyDepositInitiated( + uint256 indexed chainId, + bytes32 indexed l2DepositTxHash, + address indexed from, + address to, + address l1Asset, + uint256 amount + ); + + /// @notice Initiates a deposit by locking funds on the contract and sending the request + /// of processing an L2 transaction where tokens would be minted. 
+ /// @dev If the token is bridged for the first time, the L2 token contract will be deployed. Note however, that the + /// newly-deployed token does not support any custom logic, i.e. rebase tokens' functionality is not supported. + /// @param _originalCaller The `msg.sender` address from the external call that initiated current one. + /// @param _l2Receiver The account address that should receive funds on L2. + /// @param _l1Token The L1 token address which is deposited. + /// @param _amount The total amount of tokens to be bridged. + /// @param _l2TxGasLimit The L2 gas limit to be used in the corresponding L2 transaction. + /// @param _l2TxGasPerPubdataByte The gasPerPubdataByteLimit to be used in the corresponding L2 transaction. + /// @param _refundRecipient The address on L2 that will receive the refund for the transaction. + /// @dev If the L2 deposit finalization transaction fails, the `_refundRecipient` will receive the `_l2Value`. + /// Please note, the contract may change the refund recipient's address to eliminate sending funds to addresses + /// out of control. + /// - If `_refundRecipient` is a contract on L1, the refund will be sent to the aliased `_refundRecipient`. + /// - If `_refundRecipient` is set to `address(0)` and the sender has NO deployed bytecode on L1, the refund will + /// be sent to the `msg.sender` address. + /// - If `_refundRecipient` is set to `address(0)` and the sender has deployed bytecode on L1, the refund will be + /// sent to the aliased `msg.sender` address. + /// @dev The address aliasing of L1 contracts as refund recipient on L2 is necessary to guarantee that the funds + /// are controllable through the Mailbox, since the Mailbox applies address aliasing to the from address for the + /// L2 tx if the L1 msg.sender is a contract. 
Without address aliasing for L1 contracts as refund recipients they + /// would not be able to make proper L2 tx requests through the Mailbox to use or withdraw the funds from L2, and + /// the funds would be lost. + /// @return txHash The L2 transaction hash of deposit finalization. + function depositLegacyErc20Bridge( + address _originalCaller, + address _l2Receiver, + address _l1Token, + uint256 _amount, + uint256 _l2TxGasLimit, + uint256 _l2TxGasPerPubdataByte, + address _refundRecipient + ) external payable returns (bytes32 txHash); + + function L1_NULLIFIER() external view returns (IL1Nullifier); + + function L1_WETH_TOKEN() external view returns (address); + + function nativeTokenVault() external view returns (INativeTokenVault); + + function setAssetDeploymentTracker(bytes32 _assetRegistrationData, address _assetDeploymentTracker) external; + + function setNativeTokenVault(INativeTokenVault _nativeTokenVault) external; + + /// @notice Withdraw funds from the initiated deposit, that failed when finalizing on L2. + /// @param _chainId The ZK chain id to which the deposit was initiated. + /// @param _depositSender The address of the entity that initiated the deposit. + /// @param _assetId The unique identifier of the deposited L1 token. + /// @param _assetData The encoded transfer data, which includes both the deposit amount and the address of the L2 receiver. Might include extra information. + /// @dev Processes claims of failed deposit, whether they originated from the legacy bridge or the current system. + function bridgeRecoverFailedTransfer( + uint256 _chainId, + address _depositSender, + bytes32 _assetId, + bytes calldata _assetData + ) external; + + /// @dev Withdraw funds from the initiated deposit, that failed when finalizing on L2. + /// @param _chainId The ZK chain id to which deposit was initiated. + /// @param _depositSender The address of the entity that initiated the deposit. + /// @param _assetId The unique identifier of the deposited L1 token. 
+ /// @param _assetData The encoded transfer data, which includes both the deposit amount and the address of the L2 receiver. Might include extra information. + /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization. + /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed. + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message. + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent. + /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization. + /// @dev Processes claims of failed deposit, whether they originated from the legacy bridge or the current system. + function bridgeRecoverFailedTransfer( + uint256 _chainId, + address _depositSender, + bytes32 _assetId, + bytes memory _assetData, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external; + + /// @notice Transfers funds to Native Token Vault, if the asset is registered with it. Does nothing for ETH or non-registered tokens. + /// @dev assetId is not the padded address, but the correct encoded id (NTV stores respective format for IDs) + /// @param _amount The asset amount to be transferred to native token vault. + /// @param _originalCaller The `msg.sender` address from the external call that initiated current one. 
+ function transferFundsToNTV(bytes32 _assetId, uint256 _amount, address _originalCaller) external returns (bool); + + /// @notice Finalize the withdrawal and release funds + /// @param _chainId The chain ID of the transaction to check + /// @param _l2BatchNumber The L2 batch number where the withdrawal was processed + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message + /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent + /// @param _message The L2 withdraw data, stored in an L2 -> L1 message + /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization + function finalizeWithdrawal( + uint256 _chainId, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes calldata _message, + bytes32[] calldata _merkleProof + ) external; + + /// @notice Initiates a transfer transaction within Bridgehub, used by `requestL2TransactionTwoBridges`. + /// @param _chainId The chain ID of the ZK chain to which deposit. + /// @param _originalCaller The `msg.sender` address from the external call that initiated current one. + /// @param _value The `msg.value` on the target chain tx. + /// @param _data The calldata for the second bridge deposit. + /// @return request The data used by the bridgehub to create L2 transaction request to specific ZK chain. + /// @dev Data has the following abi encoding for legacy deposits: + /// address _l1Token, + /// uint256 _amount, + /// address _l2Receiver + /// for new deposits: + /// bytes32 _assetId, + /// bytes _transferData + function bridgehubDeposit( + uint256 _chainId, + address _originalCaller, + uint256 _value, + bytes calldata _data + ) external payable returns (L2TransactionRequestTwoBridgesInner memory request); + + /// @notice Generates a calldata for calling the deposit finalization on the L2 native token contract. 
+ // @param _chainId The chain ID of the ZK chain to which deposit. (no longer a parameter of this function) + /// @param _sender The address of the deposit initiator. + /// @param _assetId The deposited asset ID. + /// @param _assetData The encoded data, which is used by the asset handler to determine L2 recipient and amount. Might include extra information. + /// @return Returns calldata used on ZK chain. + function getDepositCalldata( + address _sender, + bytes32 _assetId, + bytes memory _assetData + ) external view returns (bytes memory); + + /// @notice Allows bridgehub to acquire mintValue for L1->L2 transactions. + /// @dev If the corresponding L2 transaction fails, refunds are issued to a refund recipient on L2. + /// @param _chainId The chain ID of the ZK chain to which deposit. + /// @param _assetId The deposited asset ID. + /// @param _originalCaller The `msg.sender` address from the external call that initiated current one. + /// @param _amount The total amount of tokens to be bridged. + function bridgehubDepositBaseToken( + uint256 _chainId, + bytes32 _assetId, + address _originalCaller, + uint256 _amount + ) external payable; + + /// @notice Routes the confirmation to nullifier for backward compatibility. + /// @notice Confirms the acceptance of a transaction by the Mailbox, as part of the L2 transaction process within Bridgehub. + /// This function is utilized by `requestL2TransactionTwoBridges` to validate the execution of a transaction. + /// @param _chainId The chain ID of the ZK chain to which confirm the deposit. + /// @param _txDataHash The keccak256 hash of 0x01 || abi.encode(bytes32, bytes) to identify deposits. + /// @param _txHash The hash of the L1->L2 transaction to confirm the deposit. 
+ function bridgehubConfirmL2Transaction(uint256 _chainId, bytes32 _txDataHash, bytes32 _txHash) external; + + function isWithdrawalFinalized( + uint256 _chainId, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex + ) external view returns (bool); +} diff --git a/l1-contracts/contracts/bridge/asset-router/IL2AssetRouter.sol b/l1-contracts/contracts/bridge/asset-router/IL2AssetRouter.sol new file mode 100644 index 000000000..81b1bc995 --- /dev/null +++ b/l1-contracts/contracts/bridge/asset-router/IL2AssetRouter.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +import {IAssetRouterBase} from "./IAssetRouterBase.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IL2AssetRouter is IAssetRouterBase { + event WithdrawalInitiatedAssetRouter( + uint256 chainId, + address indexed l2Sender, + bytes32 indexed assetId, + bytes assetData + ); + + function withdraw(bytes32 _assetId, bytes calldata _transferData) external returns (bytes32); + + function l1AssetRouter() external view returns (address); + + function withdrawLegacyBridge(address _l1Receiver, address _l2Token, uint256 _amount, address _sender) external; + + function finalizeDepositLegacyBridge( + address _l1Sender, + address _l2Receiver, + address _l1Token, + uint256 _amount, + bytes calldata _data + ) external; + + /// @dev Used to set the assetAddress for a given assetId. 
+ /// @dev Will be used by ZK Gateway + function setAssetHandlerAddress(uint256 _originChainId, bytes32 _assetId, address _assetAddress) external; +} diff --git a/l1-contracts/contracts/bridge/asset-router/L1AssetRouter.sol b/l1-contracts/contracts/bridge/asset-router/L1AssetRouter.sol new file mode 100644 index 000000000..ff94d0db5 --- /dev/null +++ b/l1-contracts/contracts/bridge/asset-router/L1AssetRouter.sol @@ -0,0 +1,631 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable reason-string, gas-custom-errors + +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; +import {SafeERC20} from "@openzeppelin/contracts-v4/token/ERC20/utils/SafeERC20.sol"; + +import {IL1AssetRouter} from "./IL1AssetRouter.sol"; +import {IL2AssetRouter} from "./IL2AssetRouter.sol"; +import {IAssetRouterBase, LEGACY_ENCODING_VERSION, NEW_ENCODING_VERSION, SET_ASSET_HANDLER_COUNTERPART_ENCODING_VERSION} from "./IAssetRouterBase.sol"; +import {AssetRouterBase} from "./AssetRouterBase.sol"; + +import {IL1AssetHandler} from "../interfaces/IL1AssetHandler.sol"; +import {IL1ERC20Bridge} from "../interfaces/IL1ERC20Bridge.sol"; +import {IAssetHandler} from "../interfaces/IAssetHandler.sol"; +import {IL1Nullifier, FinalizeL1DepositParams} from "../interfaces/IL1Nullifier.sol"; +import {INativeTokenVault} from "../ntv/INativeTokenVault.sol"; +import {IL2SharedBridgeLegacyFunctions} from "../interfaces/IL2SharedBridgeLegacyFunctions.sol"; + +import {ReentrancyGuard} from "../../common/ReentrancyGuard.sol"; +import {DataEncoding} from "../../common/libraries/DataEncoding.sol"; +import {AddressAliasHelper} from "../../vendor/AddressAliasHelper.sol"; +import {TWO_BRIDGES_MAGIC_VALUE, ETH_TOKEN_ADDRESS} from "../../common/Config.sol"; +import {UnsupportedEncodingVersion, AssetIdNotSupported, AssetHandlerDoesNotExist, Unauthorized, ZeroAddress, TokenNotSupported, AddressAlreadyUsed} from "../../common/L1ContractErrors.sol"; +import 
{L2_ASSET_ROUTER_ADDR} from "../../common/L2ContractAddresses.sol"; + +import {IBridgehub, L2TransactionRequestTwoBridgesInner, L2TransactionRequestDirect} from "../../bridgehub/IBridgehub.sol"; + +import {IL1AssetDeploymentTracker} from "../interfaces/IL1AssetDeploymentTracker.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev Bridges assets between L1 and ZK chain, supporting both ETH and ERC20 tokens. +/// @dev Designed for use with a proxy for upgradability. +contract L1AssetRouter is AssetRouterBase, IL1AssetRouter, ReentrancyGuard { + using SafeERC20 for IERC20; + + /// @dev The address of the WETH token on L1. + address public immutable override L1_WETH_TOKEN; + + /// @dev The address of ZKsync Era diamond proxy contract. + address internal immutable ERA_DIAMOND_PROXY; + + /// @dev Address of nullifier. + IL1Nullifier public immutable L1_NULLIFIER; + + /// @dev Address of native token vault. + INativeTokenVault public nativeTokenVault; + + /// @dev Address of legacy bridge. + IL1ERC20Bridge public legacyBridge; + + /// @notice Checks that the message sender is the nullifier. + modifier onlyNullifier() { + if (msg.sender != address(L1_NULLIFIER)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks that the message sender is the bridgehub or ZKsync Era Diamond Proxy. + modifier onlyBridgehubOrEra(uint256 _chainId) { + if (msg.sender != address(BRIDGE_HUB) && (_chainId != ERA_CHAIN_ID || msg.sender != ERA_DIAMOND_PROXY)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks that the message sender is the legacy bridge. + modifier onlyLegacyBridge() { + if (msg.sender != address(legacyBridge)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks that the message sender is the native token vault. 
+ modifier onlyNativeTokenVault() { + if (msg.sender != address(nativeTokenVault)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @dev Contract is expected to be used as proxy implementation. + /// @dev Initialize the implementation to prevent Parity hack. + constructor( + address _l1WethAddress, + address _bridgehub, + address _l1Nullifier, + uint256 _eraChainId, + address _eraDiamondProxy + ) reentrancyGuardInitializer AssetRouterBase(block.chainid, _eraChainId, IBridgehub(_bridgehub)) { + _disableInitializers(); + L1_WETH_TOKEN = _l1WethAddress; + ERA_DIAMOND_PROXY = _eraDiamondProxy; + L1_NULLIFIER = IL1Nullifier(_l1Nullifier); + } + + /// @dev Initializes a contract bridge for later use. Expected to be used in the proxy. + /// @dev Used for testing purposes only, as the contract has been initialized on mainnet. + /// @param _owner The address which can change L2 token implementation and upgrade the bridge implementation. + /// The owner is the Governor and separate from the ProxyAdmin from now on, so that the Governor can call the bridge. + function initialize(address _owner) external reentrancyGuardInitializer initializer { + if (_owner == address(0)) { + revert ZeroAddress(); + } + _transferOwnership(_owner); + } + + /// @notice Sets the NativeTokenVault contract address. + /// @dev Should be called only once by the owner. + /// @param _nativeTokenVault The address of the native token vault. + function setNativeTokenVault(INativeTokenVault _nativeTokenVault) external onlyOwner { + require(address(nativeTokenVault) == address(0), "AR: native token v already set"); + require(address(_nativeTokenVault) != address(0), "AR: native token vault 0"); + nativeTokenVault = _nativeTokenVault; + bytes32 ethAssetId = DataEncoding.encodeNTVAssetId(block.chainid, ETH_TOKEN_ADDRESS); + assetHandlerAddress[ethAssetId] = address(nativeTokenVault); + } + + /// @notice Sets the L1ERC20Bridge contract address. + /// @dev Should be called only once by the owner. 
+ /// @param _legacyBridge The address of the legacy bridge. + function setL1Erc20Bridge(IL1ERC20Bridge _legacyBridge) external onlyOwner { + if (address(legacyBridge) != address(0)) { + revert AddressAlreadyUsed(address(legacyBridge)); + } + if (address(_legacyBridge) == address(0)) { + revert ZeroAddress(); + } + legacyBridge = _legacyBridge; + } + + /// @notice Used to set the asset deployment tracker address for given asset data. + /// @param _assetRegistrationData The asset data which may include the asset address and any additional required data or encodings. + /// @param _assetDeploymentTracker The whitelisted address of asset deployment tracker for provided asset. + function setAssetDeploymentTracker( + bytes32 _assetRegistrationData, + address _assetDeploymentTracker + ) external onlyOwner { + bytes32 assetId = keccak256( + abi.encode(uint256(block.chainid), _assetDeploymentTracker, _assetRegistrationData) + ); + assetDeploymentTracker[assetId] = _assetDeploymentTracker; + emit AssetDeploymentTrackerSet(assetId, _assetDeploymentTracker, _assetRegistrationData); + } + + /// @inheritdoc IAssetRouterBase + function setAssetHandlerAddressThisChain( + bytes32 _assetRegistrationData, + address _assetHandlerAddress + ) external override(AssetRouterBase, IAssetRouterBase) { + _setAssetHandlerAddressThisChain(address(nativeTokenVault), _assetRegistrationData, _assetHandlerAddress); + } + + /// @notice Used to set the asset handler address for a given asset ID on a remote ZK chain + /// @dev No access control on the caller, as msg.sender is encoded in the assetId. + /// @param _chainId The ZK chain ID. + /// @param _originalCaller The `msg.sender` address from the external call that initiated current one. + /// @param _assetId The encoding of asset ID. + /// @param _assetHandlerAddressOnCounterpart The address of the asset handler, which will hold the token of interest. 
+ /// @return request The tx request sent to the Bridgehub + function _setAssetHandlerAddressOnCounterpart( + uint256 _chainId, + address _originalCaller, + bytes32 _assetId, + address _assetHandlerAddressOnCounterpart + ) internal view returns (L2TransactionRequestTwoBridgesInner memory request) { + IL1AssetDeploymentTracker(assetDeploymentTracker[_assetId]).bridgeCheckCounterpartAddress( + _chainId, + _assetId, + _originalCaller, + _assetHandlerAddressOnCounterpart + ); + + bytes memory l2Calldata = abi.encodeCall( + IL2AssetRouter.setAssetHandlerAddress, + (block.chainid, _assetId, _assetHandlerAddressOnCounterpart) + ); + request = L2TransactionRequestTwoBridgesInner({ + magicValue: TWO_BRIDGES_MAGIC_VALUE, + l2Contract: L2_ASSET_ROUTER_ADDR, + l2Calldata: l2Calldata, + factoryDeps: new bytes[](0), + txDataHash: bytes32(0x00) + }); + } + + /*////////////////////////////////////////////////////////////// + INITIATE DEPOSIT Functions + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc IL1AssetRouter + function bridgehubDepositBaseToken( + uint256 _chainId, + bytes32 _assetId, + address _originalCaller, + uint256 _amount + ) public payable virtual override onlyBridgehubOrEra(_chainId) whenNotPaused { + address assetHandler = assetHandlerAddress[_assetId]; + if (assetHandler == address(0)) { + revert AssetHandlerDoesNotExist(_assetId); + } + + // slither-disable-next-line unused-return + IAssetHandler(assetHandler).bridgeBurn{value: msg.value}({ + _chainId: _chainId, + _msgValue: 0, + _assetId: _assetId, + _originalCaller: _originalCaller, + _data: abi.encode(_amount, address(0)) + }); + + // Note that we don't save the deposited amount, as this is for the base token, which gets sent to the refundRecipient if the tx fails + emit BridgehubDepositBaseTokenInitiated(_chainId, _originalCaller, _assetId, _amount); + } + + /// @inheritdoc IL1AssetRouter + function bridgehubDeposit( + uint256 _chainId, + address _originalCaller, + 
uint256 _value, + bytes calldata _data + ) + external + payable + virtual + override + onlyBridgehub + whenNotPaused + returns (L2TransactionRequestTwoBridgesInner memory request) + { + bytes32 assetId; + bytes memory transferData; + bytes1 encodingVersion = _data[0]; + // The new encoding ensures that the calldata is collision-resistant with respect to the legacy format. + // In the legacy calldata, the first input was the address, meaning the most significant byte was always `0x00`. + if (encodingVersion == SET_ASSET_HANDLER_COUNTERPART_ENCODING_VERSION) { + (bytes32 _assetId, address _assetHandlerAddressOnCounterpart) = abi.decode(_data[1:], (bytes32, address)); + return + _setAssetHandlerAddressOnCounterpart( + _chainId, + _originalCaller, + _assetId, + _assetHandlerAddressOnCounterpart + ); + } else if (encodingVersion == NEW_ENCODING_VERSION) { + (assetId, transferData) = abi.decode(_data[1:], (bytes32, bytes)); + } else if (encodingVersion == LEGACY_ENCODING_VERSION) { + (assetId, transferData) = _handleLegacyData(_data, _originalCaller); + } else { + revert UnsupportedEncodingVersion(); + } + + if (BRIDGE_HUB.baseTokenAssetId(_chainId) == assetId) { + revert AssetIdNotSupported(assetId); + } + + bytes memory bridgeMintCalldata = _burn({ + _chainId: _chainId, + _nextMsgValue: _value, + _assetId: assetId, + _originalCaller: _originalCaller, + _transferData: transferData, + _passValue: true + }); + + bytes32 txDataHash = DataEncoding.encodeTxDataHash({ + _nativeTokenVault: address(nativeTokenVault), + _encodingVersion: encodingVersion, + _originalCaller: _originalCaller, + _assetId: assetId, + _transferData: transferData + }); + + request = _requestToBridge({ + _originalCaller: _originalCaller, + _assetId: assetId, + _bridgeMintCalldata: bridgeMintCalldata, + _txDataHash: txDataHash + }); + + emit BridgehubDepositInitiated({ + chainId: _chainId, + txDataHash: txDataHash, + from: _originalCaller, + assetId: assetId, + bridgeMintCalldata: bridgeMintCalldata + 
}); + } + + /// @inheritdoc IL1AssetRouter + function bridgehubConfirmL2Transaction( + uint256 _chainId, + bytes32 _txDataHash, + bytes32 _txHash + ) external override onlyBridgehub whenNotPaused { + L1_NULLIFIER.bridgehubConfirmL2TransactionForwarded(_chainId, _txDataHash, _txHash); + } + + /*////////////////////////////////////////////////////////////// + Receive transaction Functions + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc IAssetRouterBase + function finalizeDeposit( + uint256 _chainId, + bytes32 _assetId, + bytes calldata _transferData + ) public override(AssetRouterBase, IAssetRouterBase) onlyNullifier { + _finalizeDeposit(_chainId, _assetId, _transferData, address(nativeTokenVault)); + emit DepositFinalizedAssetRouter(_chainId, _assetId, _transferData); + } + + /*////////////////////////////////////////////////////////////// + CLAIM FAILED DEPOSIT Functions + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc IL1AssetRouter + function bridgeRecoverFailedTransfer( + uint256 _chainId, + address _depositSender, + bytes32 _assetId, + bytes calldata _assetData + ) external override onlyNullifier nonReentrant whenNotPaused { + IL1AssetHandler(assetHandlerAddress[_assetId]).bridgeRecoverFailedTransfer( + _chainId, + _assetId, + _depositSender, + _assetData + ); + + emit ClaimedFailedDepositAssetRouter(_chainId, _assetId, _assetData); + } + + function bridgeRecoverFailedTransfer( + uint256 _chainId, + address _depositSender, + bytes32 _assetId, + bytes calldata _assetData, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external { + L1_NULLIFIER.bridgeRecoverFailedTransfer({ + _chainId: _chainId, + _depositSender: _depositSender, + _assetId: _assetId, + _assetData: _assetData, + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: 
_l2TxNumberInBatch, + _merkleProof: _merkleProof + }); + } + + /*////////////////////////////////////////////////////////////// + Internal & Helpers + //////////////////////////////////////////////////////////////*/ + + /// @notice Decodes the transfer input for legacy data and transfers allowance to NTV. + /// @dev Is not applicable for custom asset handlers. + /// @param _data The encoded transfer data (address _l1Token, uint256 _depositAmount, address _l2Receiver). + /// @return Tuple of asset ID and encoded transfer data to conform with new encoding standard. + function _handleLegacyData(bytes calldata _data, address) internal returns (bytes32, bytes memory) { + (address _l1Token, uint256 _depositAmount, address _l2Receiver) = abi.decode( + _data, + (address, uint256, address) + ); + bytes32 assetId = _ensureTokenRegisteredWithNTV(_l1Token); + return (assetId, abi.encode(_depositAmount, _l2Receiver)); + } + + /// @notice Ensures that token is registered with native token vault. + /// @dev Only used when deposit is made with legacy data encoding format. + /// @param _token The native token address which should be registered with native token vault. + /// @return assetId The asset ID of the token provided. 
+ function _ensureTokenRegisteredWithNTV(address _token) internal override returns (bytes32 assetId) { + assetId = nativeTokenVault.assetId(_token); + if (assetId != bytes32(0)) { + return assetId; + } + nativeTokenVault.ensureTokenIsRegistered(_token); + assetId = nativeTokenVault.assetId(_token); + } + + /// @inheritdoc IL1AssetRouter + function transferFundsToNTV( + bytes32 _assetId, + uint256 _amount, + address _originalCaller + ) external onlyNativeTokenVault returns (bool) { + address l1TokenAddress = INativeTokenVault(address(nativeTokenVault)).tokenAddress(_assetId); + if (l1TokenAddress == address(0) || l1TokenAddress == ETH_TOKEN_ADDRESS) { + return false; + } + IERC20 l1Token = IERC20(l1TokenAddress); + + // Do the transfer if allowance to Shared bridge is bigger than amount + // And if there is not enough allowance for the NTV + if ( + l1Token.allowance(_originalCaller, address(this)) >= _amount && + l1Token.allowance(_originalCaller, address(nativeTokenVault)) < _amount + ) { + // slither-disable-next-line arbitrary-send-erc20 + l1Token.safeTransferFrom(_originalCaller, address(nativeTokenVault), _amount); + return true; + } + return false; + } + + /// @dev The request data that is passed to the bridgehub. + /// @param _originalCaller The `msg.sender` address from the external call that initiated current one. + /// @param _assetId The deposited asset ID. + /// @param _bridgeMintCalldata The calldata used by remote asset handler to mint tokens for recipient. + /// @param _txDataHash The keccak256 hash of 0x01 || abi.encode(bytes32, bytes) to identify deposits. + /// @return request The data used by the bridgehub to create L2 transaction request to specific ZK chain. 
+ function _requestToBridge( + address _originalCaller, + bytes32 _assetId, + bytes memory _bridgeMintCalldata, + bytes32 _txDataHash + ) internal view virtual returns (L2TransactionRequestTwoBridgesInner memory request) { + bytes memory l2TxCalldata = getDepositCalldata(_originalCaller, _assetId, _bridgeMintCalldata); + + request = L2TransactionRequestTwoBridgesInner({ + magicValue: TWO_BRIDGES_MAGIC_VALUE, + l2Contract: L2_ASSET_ROUTER_ADDR, + l2Calldata: l2TxCalldata, + factoryDeps: new bytes[](0), + txDataHash: _txDataHash + }); + } + + /// @inheritdoc IL1AssetRouter + function getDepositCalldata( + address _sender, + bytes32 _assetId, + bytes memory _assetData + ) public view override returns (bytes memory) { + // First branch covers the case when asset is not registered with NTV (custom asset handler) + // Second branch handles tokens registered with NTV and uses legacy calldata encoding + // We need to use the legacy encoding to support the old SDK, which relies on a specific encoding of the data. 
+ if ( + (nativeTokenVault.tokenAddress(_assetId) == address(0)) || + (nativeTokenVault.originChainId(_assetId) != block.chainid) + ) { + return abi.encodeCall(IAssetRouterBase.finalizeDeposit, (block.chainid, _assetId, _assetData)); + } else { + // slither-disable-next-line unused-return + (, address _receiver, address _parsedNativeToken, uint256 _amount, bytes memory _gettersData) = DataEncoding + .decodeBridgeMintData(_assetData); + return + _getLegacyNTVCalldata({ + _sender: _sender, + _receiver: _receiver, + _parsedNativeToken: _parsedNativeToken, + _amount: _amount, + _gettersData: _gettersData + }); + } + } + + function _getLegacyNTVCalldata( + address _sender, + address _receiver, + address _parsedNativeToken, + uint256 _amount, + bytes memory _gettersData + ) internal pure returns (bytes memory) { + return + abi.encodeCall( + IL2SharedBridgeLegacyFunctions.finalizeDeposit, + (_sender, _receiver, _parsedNativeToken, _amount, _gettersData) + ); + } + + /*////////////////////////////////////////////////////////////// + Legacy Functions + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc IL1AssetRouter + function depositLegacyErc20Bridge( + address _originalCaller, + address _l2Receiver, + address _l1Token, + uint256 _amount, + uint256 _l2TxGasLimit, + uint256 _l2TxGasPerPubdataByte, + address _refundRecipient + ) external payable override onlyLegacyBridge nonReentrant whenNotPaused returns (bytes32 txHash) { + if (_l1Token == L1_WETH_TOKEN) { + revert TokenNotSupported(L1_WETH_TOKEN); + } + + bytes32 _assetId; + bytes memory bridgeMintCalldata; + + { + // Inner call to encode data to decrease local var numbers + _assetId = _ensureTokenRegisteredWithNTV(_l1Token); + IERC20(_l1Token).forceApprove(address(nativeTokenVault), _amount); + + bridgeMintCalldata = _burn({ + _chainId: ERA_CHAIN_ID, + _nextMsgValue: 0, + _assetId: _assetId, + _originalCaller: _originalCaller, + _transferData: abi.encode(_amount, _l2Receiver), + 
_passValue: false + }); + } + + { + bytes memory l2TxCalldata = getDepositCalldata(_originalCaller, _assetId, bridgeMintCalldata); + + // If the refund recipient is not specified, the refund will be sent to the sender of the transaction. + // Otherwise, the refund will be sent to the specified address. + // If the recipient is a contract on L1, the address alias will be applied. + address refundRecipient = AddressAliasHelper.actualRefundRecipient(_refundRecipient, _originalCaller); + + L2TransactionRequestDirect memory request = L2TransactionRequestDirect({ + chainId: ERA_CHAIN_ID, + l2Contract: L2_ASSET_ROUTER_ADDR, + mintValue: msg.value, // l2 gas + l2 msg.Value the bridgehub will withdraw the mintValue from the base token bridge for gas + l2Value: 0, // L2 msg.value, this contract doesn't support base token deposits or wrapping functionality, for direct deposits use bridgehub + l2Calldata: l2TxCalldata, + l2GasLimit: _l2TxGasLimit, + l2GasPerPubdataByteLimit: _l2TxGasPerPubdataByte, + factoryDeps: new bytes[](0), + refundRecipient: refundRecipient + }); + txHash = BRIDGE_HUB.requestL2TransactionDirect{value: msg.value}(request); + } + + // Save the deposited amount to claim funds on L1 if the deposit failed on L2 + L1_NULLIFIER.bridgehubConfirmL2TransactionForwarded( + ERA_CHAIN_ID, + keccak256(abi.encode(_originalCaller, _l1Token, _amount)), + txHash + ); + + emit LegacyDepositInitiated({ + chainId: ERA_CHAIN_ID, + l2DepositTxHash: txHash, + from: _originalCaller, + to: _l2Receiver, + l1Asset: _l1Token, + amount: _amount + }); + } + + /// @inheritdoc IL1AssetRouter + function finalizeWithdrawal( + uint256 _chainId, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes calldata _message, + bytes32[] calldata _merkleProof + ) external override { + /// @dev We use a deprecated field to support L2->L1 legacy withdrawals, which were started + /// by the legacy bridge. 
+ address legacyL2Bridge = L1_NULLIFIER.l2BridgeAddress(_chainId); + FinalizeL1DepositParams memory finalizeWithdrawalParams = FinalizeL1DepositParams({ + chainId: _chainId, + l2BatchNumber: _l2BatchNumber, + l2MessageIndex: _l2MessageIndex, + l2Sender: legacyL2Bridge == address(0) ? L2_ASSET_ROUTER_ADDR : legacyL2Bridge, + l2TxNumberInBatch: _l2TxNumberInBatch, + message: _message, + merkleProof: _merkleProof + }); + L1_NULLIFIER.finalizeDeposit(finalizeWithdrawalParams); + } + + /// @dev Withdraw funds from the initiated deposit, that failed when finalizing on L2. + /// @param _depositSender The address of the deposit initiator. + /// @param _l1Token The address of the deposited L1 ERC20 token. + /// @param _amount The amount of the deposit that failed. + /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization. + /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed. + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message. + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent. + /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization. 
+ function claimFailedDeposit( + uint256 _chainId, + address _depositSender, + address _l1Token, + uint256 _amount, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external { + L1_NULLIFIER.claimFailedDeposit({ + _chainId: _chainId, + _depositSender: _depositSender, + _l1Token: _l1Token, + _amount: _amount, + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _merkleProof + }); + } + + /// @notice Legacy read method, which forwards the call to L1Nullifier to check if withdrawal was finalized + function isWithdrawalFinalized( + uint256 _chainId, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex + ) external view returns (bool) { + return L1_NULLIFIER.isWithdrawalFinalized(_chainId, _l2BatchNumber, _l2MessageIndex); + } + + /// @notice Legacy function to get the L2 shared bridge address for a chain. + /// @dev In case the chain has been deployed after the gateway release, + /// the returned value is 0. 
+ function l2BridgeAddress(uint256 _chainId) external view override returns (address) { + return L1_NULLIFIER.l2BridgeAddress(_chainId); + } +} diff --git a/l1-contracts/contracts/bridge/asset-router/L2AssetRouter.sol b/l1-contracts/contracts/bridge/asset-router/L2AssetRouter.sol new file mode 100644 index 000000000..0a10822f4 --- /dev/null +++ b/l1-contracts/contracts/bridge/asset-router/L2AssetRouter.sol @@ -0,0 +1,342 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IL2AssetRouter} from "./IL2AssetRouter.sol"; +import {IAssetRouterBase} from "./IAssetRouterBase.sol"; +import {AssetRouterBase} from "./AssetRouterBase.sol"; + +import {IL2NativeTokenVault} from "../ntv/IL2NativeTokenVault.sol"; +import {INativeTokenVault} from "../ntv/INativeTokenVault.sol"; +import {IL2SharedBridgeLegacy} from "../interfaces/IL2SharedBridgeLegacy.sol"; +import {IAssetHandler} from "../interfaces/IAssetHandler.sol"; +import {IBridgedStandardToken} from "../interfaces/IBridgedStandardToken.sol"; +import {IL1ERC20Bridge} from "../interfaces/IL1ERC20Bridge.sol"; + +import {IBridgehub} from "../../bridgehub/IBridgehub.sol"; +import {AddressAliasHelper} from "../../vendor/AddressAliasHelper.sol"; + +import {L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR} from "../../common/L2ContractAddresses.sol"; +import {L2ContractHelper} from "../../common/libraries/L2ContractHelper.sol"; +import {DataEncoding} from "../../common/libraries/DataEncoding.sol"; +import {EmptyAddress, InvalidCaller, AmountMustBeGreaterThanZero, AssetIdNotSupported} from "../../common/L1ContractErrors.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice The "default" bridge implementation for the ERC20 tokens. Note, that it does not +/// support any custom token logic, i.e. rebase tokens' functionality is not supported. +contract L2AssetRouter is AssetRouterBase, IL2AssetRouter { + /// @dev The address of the L2 legacy shared bridge. 
+ address public immutable L2_LEGACY_SHARED_BRIDGE; + + /// @dev The asset id of the base token. + bytes32 public immutable BASE_TOKEN_ASSET_ID; + + /// @dev The address of the L1 asset router counterpart. + address public override l1AssetRouter; + + /// @notice Checks that the message sender is the L1 Asset Router. + modifier onlyAssetRouterCounterpart(uint256 _originChainId) { + if (_originChainId == L1_CHAIN_ID) { + // Only the L1 Asset Router counterpart can initiate and finalize the deposit. + if (AddressAliasHelper.undoL1ToL2Alias(msg.sender) != l1AssetRouter) { + revert InvalidCaller(msg.sender); + } + } else { + revert InvalidCaller(msg.sender); // xL2 messaging not supported for now + } + _; + } + + /// @notice Checks that the message sender is the L1 Asset Router. + modifier onlyAssetRouterCounterpartOrSelf(uint256 _originChainId) { + if (_originChainId == L1_CHAIN_ID) { + // Only the L1 Asset Router counterpart can initiate and finalize the deposit. + if ((AddressAliasHelper.undoL1ToL2Alias(msg.sender) != l1AssetRouter) && (msg.sender != address(this))) { + revert InvalidCaller(msg.sender); + } + } + _; + } + + /// @notice Checks that the message sender is the legacy L2 bridge. + modifier onlyLegacyBridge() { + if (msg.sender != L2_LEGACY_SHARED_BRIDGE) { + revert InvalidCaller(msg.sender); + } + _; + } + + /// @dev Disable the initialization to prevent Parity hack. + /// @param _l1AssetRouter The address of the L1 Bridge contract. 
+ constructor( + uint256 _l1ChainId, + uint256 _eraChainId, + address _l1AssetRouter, + address _legacySharedBridge, + bytes32 _baseTokenAssetId, + address _aliasedOwner + ) AssetRouterBase(_l1ChainId, _eraChainId, IBridgehub(L2_BRIDGEHUB_ADDR)) { + L2_LEGACY_SHARED_BRIDGE = _legacySharedBridge; + if (_l1AssetRouter == address(0)) { + revert EmptyAddress(); + } + l1AssetRouter = _l1AssetRouter; + assetHandlerAddress[_baseTokenAssetId] = L2_NATIVE_TOKEN_VAULT_ADDR; + BASE_TOKEN_ASSET_ID = _baseTokenAssetId; + _disableInitializers(); + _transferOwnership(_aliasedOwner); + } + + /// @inheritdoc IL2AssetRouter + function setAssetHandlerAddress( + uint256 _originChainId, + bytes32 _assetId, + address _assetAddress + ) external override onlyAssetRouterCounterpart(_originChainId) { + assetHandlerAddress[_assetId] = _assetAddress; + emit AssetHandlerRegistered(_assetId, _assetAddress); + } + + /// @inheritdoc IAssetRouterBase + function setAssetHandlerAddressThisChain( + bytes32 _assetRegistrationData, + address _assetHandlerAddress + ) external override(AssetRouterBase, IAssetRouterBase) { + _setAssetHandlerAddressThisChain(L2_NATIVE_TOKEN_VAULT_ADDR, _assetRegistrationData, _assetHandlerAddress); + } + + /*////////////////////////////////////////////////////////////// + Receive transaction Functions + //////////////////////////////////////////////////////////////*/ + + /// @notice Finalize the deposit and mint funds + /// @param _assetId The encoding of the asset on L2 + /// @param _transferData The encoded data required for deposit (address _l1Sender, uint256 _amount, address _l2Receiver, bytes memory erc20Data, address originToken) + function finalizeDeposit( + // solhint-disable-next-line no-unused-vars + uint256, + bytes32 _assetId, + bytes calldata _transferData + ) public override(AssetRouterBase, IAssetRouterBase) onlyAssetRouterCounterpartOrSelf(L1_CHAIN_ID) { + if (_assetId == BASE_TOKEN_ASSET_ID) { + revert AssetIdNotSupported(BASE_TOKEN_ASSET_ID); + } + 
_finalizeDeposit(L1_CHAIN_ID, _assetId, _transferData, L2_NATIVE_TOKEN_VAULT_ADDR); + + emit DepositFinalizedAssetRouter(L1_CHAIN_ID, _assetId, _transferData); + } + + /*////////////////////////////////////////////////////////////// + Internal & Helpers + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc AssetRouterBase + function _ensureTokenRegisteredWithNTV(address _token) internal override returns (bytes32 assetId) { + IL2NativeTokenVault nativeTokenVault = IL2NativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR); + nativeTokenVault.ensureTokenIsRegistered(_token); + } + + /*////////////////////////////////////////////////////////////// + LEGACY FUNCTIONS + //////////////////////////////////////////////////////////////*/ + + /// @notice Initiates a withdrawal by burning funds on the contract and sending the message to L1 + /// where tokens would be unlocked + /// @dev do not rely on this function, it will be deprecated in the future + /// @param _assetId The asset id of the withdrawn asset + /// @param _assetData The data that is passed to the asset handler contract + function withdraw(bytes32 _assetId, bytes memory _assetData) public override returns (bytes32) { + return _withdrawSender(_assetId, _assetData, msg.sender, true); + } + + function withdrawToken(address _l2NativeToken, bytes memory _assetData) public returns (bytes32) { + bytes32 recordedAssetId = INativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).assetId(_l2NativeToken); + uint256 recordedOriginChainId = INativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).originChainId(recordedAssetId); + if (recordedOriginChainId == L1_CHAIN_ID) { + revert AssetIdNotSupported(recordedAssetId); + } + bytes32 assetId = _ensureTokenRegisteredWithNTV(_l2NativeToken); + return _withdrawSender(assetId, _assetData, msg.sender, true); + } + + /// @notice Initiates a withdrawal by burning funds on the contract and sending the message to L1 + /// where tokens would be unlocked + /// @param _assetId The asset 
id of the withdrawn asset + /// @param _assetData The data that is passed to the asset handler contract + /// @param _sender The address of the sender of the message + /// @param _alwaysNewMessageFormat Whether to use the new message format compatible with Custom Asset Handlers + function _withdrawSender( + bytes32 _assetId, + bytes memory _assetData, + address _sender, + bool _alwaysNewMessageFormat + ) internal returns (bytes32 txHash) { + address assetHandler = assetHandlerAddress[_assetId]; + bytes memory _l1bridgeMintData = IAssetHandler(assetHandler).bridgeBurn({ + _chainId: L1_CHAIN_ID, + _msgValue: 0, + _assetId: _assetId, + _originalCaller: _sender, + _data: _assetData + }); + + bytes memory message; + if (_alwaysNewMessageFormat || L2_LEGACY_SHARED_BRIDGE == address(0)) { + message = _getAssetRouterWithdrawMessage(_assetId, _l1bridgeMintData); + // slither-disable-next-line unused-return + txHash = L2ContractHelper.sendMessageToL1(message); + } else { + address l1Token = IBridgedStandardToken( + IL2NativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).tokenAddress(_assetId) + ).originToken(); + if (l1Token == address(0)) { + revert AssetIdNotSupported(_assetId); + } + (uint256 amount, address l1Receiver) = abi.decode(_assetData, (uint256, address)); + message = _getSharedBridgeWithdrawMessage(l1Receiver, l1Token, amount); + txHash = IL2SharedBridgeLegacy(L2_LEGACY_SHARED_BRIDGE).sendMessageToL1(message); + } + + emit WithdrawalInitiatedAssetRouter(L1_CHAIN_ID, _sender, _assetId, _assetData); + } + + /// @notice Encodes the message for l2ToL1log sent during withdraw initialization. + /// @param _assetId The encoding of the asset on L2 which is withdrawn. + /// @param _l1bridgeMintData The calldata used by l1 asset handler to unlock tokens for recipient. 
+ function _getAssetRouterWithdrawMessage( + bytes32 _assetId, + bytes memory _l1bridgeMintData + ) internal view returns (bytes memory) { + // solhint-disable-next-line func-named-parameters + return abi.encodePacked(IAssetRouterBase.finalizeDeposit.selector, block.chainid, _assetId, _l1bridgeMintData); + } + + /// @notice Encodes the message for l2ToL1log sent during withdraw initialization. + function _getSharedBridgeWithdrawMessage( + address _l1Receiver, + address _l1Token, + uint256 _amount + ) internal pure returns (bytes memory) { + // solhint-disable-next-line func-named-parameters + return abi.encodePacked(IL1ERC20Bridge.finalizeWithdrawal.selector, _l1Receiver, _l1Token, _amount); + } + + /// @notice Legacy finalizeDeposit. + /// @dev Finalizes the deposit and mint funds. + /// @param _l1Sender The address of token sender on L1. + /// @param _l2Receiver The address of token receiver on L2. + /// @param _l1Token The address of the token transferred. + /// @param _amount The amount of the token transferred. + /// @param _data The metadata of the token transferred. 
+ function finalizeDeposit( + address _l1Sender, + address _l2Receiver, + address _l1Token, + uint256 _amount, + bytes calldata _data + ) external onlyAssetRouterCounterpart(L1_CHAIN_ID) { + _translateLegacyFinalizeDeposit({ + _l1Sender: _l1Sender, + _l2Receiver: _l2Receiver, + _l1Token: _l1Token, + _amount: _amount, + _data: _data + }); + } + + function finalizeDepositLegacyBridge( + address _l1Sender, + address _l2Receiver, + address _l1Token, + uint256 _amount, + bytes calldata _data + ) external onlyLegacyBridge { + _translateLegacyFinalizeDeposit({ + _l1Sender: _l1Sender, + _l2Receiver: _l2Receiver, + _l1Token: _l1Token, + _amount: _amount, + _data: _data + }); + } + + function _translateLegacyFinalizeDeposit( + address _l1Sender, + address _l2Receiver, + address _l1Token, + uint256 _amount, + bytes calldata _data + ) internal { + bytes32 assetId = DataEncoding.encodeNTVAssetId(L1_CHAIN_ID, _l1Token); + // solhint-disable-next-line func-named-parameters + bytes memory data = DataEncoding.encodeBridgeMintData(_l1Sender, _l2Receiver, _l1Token, _amount, _data); + this.finalizeDeposit(L1_CHAIN_ID, assetId, data); + } + + /// @notice Initiates a withdrawal by burning funds on the contract and sending the message to L1 + /// where tokens would be unlocked + /// @dev A compatibility method to support legacy functionality for the SDK. + /// @param _l1Receiver The account address that should receive funds on L1 + /// @param _l2Token The L2 token address which is withdrawn + /// @param _amount The total amount of tokens to be withdrawn + function withdraw(address _l1Receiver, address _l2Token, uint256 _amount) external { + if (_amount == 0) { + revert AmountMustBeGreaterThanZero(); + } + _withdrawLegacy(_l1Receiver, _l2Token, _amount, msg.sender); + } + + /// @notice Legacy withdraw. + /// @dev Finalizes the deposit and mint funds. + /// @param _l1Receiver The address of token receiver on L1. + /// @param _l2Token The address of token on L2. 
+ /// @param _amount The amount of the token transferred. + /// @param _sender The original msg.sender. + function withdrawLegacyBridge( + address _l1Receiver, + address _l2Token, + uint256 _amount, + address _sender + ) external onlyLegacyBridge { + _withdrawLegacy(_l1Receiver, _l2Token, _amount, _sender); + } + + function _withdrawLegacy(address _l1Receiver, address _l2Token, uint256 _amount, address _sender) internal { + bytes32 assetId = DataEncoding.encodeNTVAssetId(L1_CHAIN_ID, l1TokenAddress(_l2Token)); + bytes memory data = abi.encode(_amount, _l1Receiver); + _withdrawSender(assetId, data, _sender, false); + } + + /// @notice Legacy getL1TokenAddress. + /// @param _l2Token The address of token on L2. + /// @return The address of token on L1. + function l1TokenAddress(address _l2Token) public view returns (address) { + return IBridgedStandardToken(_l2Token).l1Address(); + } + + /// @notice Legacy function used for backward compatibility to return L2 wrapped token + /// @notice address corresponding to provided L1 token address and deployed through NTV. + /// @dev However, the shared bridge can use custom asset handlers such that L2 addresses differ, + /// @dev or an L1 token may not have an L2 counterpart. + /// @param _l1Token The address of token on L1. + /// @return Address of an L2 token counterpart + function l2TokenAddress(address _l1Token) public view returns (address) { + IL2NativeTokenVault l2NativeTokenVault = IL2NativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR); + address currentlyDeployedAddress = l2NativeTokenVault.l2TokenAddress(_l1Token); + + if (currentlyDeployedAddress != address(0)) { + return currentlyDeployedAddress; + } + + // For backwards compatibility, the bridge must return the address of the token even if it + // has not been deployed yet. + return l2NativeTokenVault.calculateCreate2TokenAddress(L1_CHAIN_ID, _l1Token); + } + + /// @notice Returns the address of the L1 asset router. 
+ /// @dev The old name is kept for backward compatibility. + function l1Bridge() external view returns (address) { + return l1AssetRouter; + } +} diff --git a/l1-contracts/contracts/bridge/interfaces/IAssetHandler.sol b/l1-contracts/contracts/bridge/interfaces/IAssetHandler.sol new file mode 100644 index 000000000..57f58eb59 --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IAssetHandler.sol @@ -0,0 +1,45 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @title Asset Handler contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice Used for any asset handler and called by the AssetRouter +interface IAssetHandler { + /// @dev Emitted when a new token is initialized + event BridgeInitialize(address indexed token, string name, string symbol, uint8 decimals); + + /// @dev Emitted when a token is minted + event BridgeMint(uint256 indexed chainId, bytes32 indexed assetId, address receiver, uint256 amount); + + /// @dev Emitted when a token is burned + event BridgeBurn( + uint256 indexed chainId, + bytes32 indexed assetId, + address indexed sender, + address receiver, + uint256 amount + ); + + /// @param _chainId the chainId that the message is from + /// @param _assetId the assetId of the asset being bridged + /// @param _data the actual data specified for the function + function bridgeMint(uint256 _chainId, bytes32 _assetId, bytes calldata _data) external payable; + + /// @notice Burns bridged tokens and returns the calldata for L2 -> L1 message. + /// @dev In case of native token vault _data is the tuple of _depositAmount and _l2Receiver. + /// @param _chainId the chainId that the message will be sent to + /// @param _msgValue the msg.value of the L2 transaction. For now it is always 0. 
+ /// @param _assetId the assetId of the asset being bridged + /// @param _originalCaller the original caller of the transaction + /// @param _data the actual data specified for the function + /// @return _bridgeMintData The calldata used by counterpart asset handler to unlock tokens for recipient. + function bridgeBurn( + uint256 _chainId, + uint256 _msgValue, + bytes32 _assetId, + address _originalCaller, + bytes calldata _data + ) external payable returns (bytes memory _bridgeMintData); +} diff --git a/l1-contracts/contracts/bridge/interfaces/IBridgedStandardToken.sol b/l1-contracts/contracts/bridge/interfaces/IBridgedStandardToken.sol new file mode 100644 index 000000000..2ba2a081b --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IBridgedStandardToken.sol @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; + +interface IBridgedStandardToken { + event BridgeInitialize(address indexed l1Token, string name, string symbol, uint8 decimals); + + event BridgeMint(address indexed account, uint256 amount); + + event BridgeBurn(address indexed account, uint256 amount); + + function bridgeMint(address _account, uint256 _amount) external; + + function bridgeBurn(address _account, uint256 _amount) external; + + function l1Address() external view returns (address); + + function originToken() external view returns (address); + + function l2Bridge() external view returns (address); + + function assetId() external view returns (bytes32); + + function nativeTokenVault() external view returns (address); +} diff --git a/l1-contracts/contracts/bridge/interfaces/IL1AssetDeploymentTracker.sol b/l1-contracts/contracts/bridge/interfaces/IL1AssetDeploymentTracker.sol new file mode 100644 index 000000000..6fb6538b6 --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IL1AssetDeploymentTracker.sol @@ 
-0,0 +1,14 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IL1AssetDeploymentTracker { + function bridgeCheckCounterpartAddress( + uint256 _chainId, + bytes32 _assetId, + address _originalCaller, + address _assetHandlerAddressOnCounterpart + ) external view; +} diff --git a/l1-contracts/contracts/bridge/interfaces/IL1AssetHandler.sol b/l1-contracts/contracts/bridge/interfaces/IL1AssetHandler.sol new file mode 100644 index 000000000..c62dce3da --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IL1AssetHandler.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @title L1 Asset Handler contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice Used for any asset handler and called by the L1AssetRouter +interface IL1AssetHandler { + /// @param _chainId the chainId that the message will be sent to + /// @param _assetId the assetId of the asset being bridged + /// @param _depositSender the address of the entity that initiated the deposit. 
+ /// @param _data the actual data specified for the function + function bridgeRecoverFailedTransfer( + uint256 _chainId, + bytes32 _assetId, + address _depositSender, + bytes calldata _data + ) external payable; +} diff --git a/l1-contracts/contracts/bridge/interfaces/IL1BaseTokenAssetHandler.sol b/l1-contracts/contracts/bridge/interfaces/IL1BaseTokenAssetHandler.sol new file mode 100644 index 000000000..1e8d08bdd --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IL1BaseTokenAssetHandler.sol @@ -0,0 +1,12 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @title L1 Base Token Asset Handler contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice Used for any asset handler and called by the L1AssetRouter +interface IL1BaseTokenAssetHandler { + /// @notice Used to get the token address of an assetId + function tokenAddress(bytes32 _assetId) external view returns (address); +} diff --git a/l1-contracts/contracts/bridge/interfaces/IL1ERC20Bridge.sol b/l1-contracts/contracts/bridge/interfaces/IL1ERC20Bridge.sol index f7dd7b07b..fcba5da5a 100644 --- a/l1-contracts/contracts/bridge/interfaces/IL1ERC20Bridge.sol +++ b/l1-contracts/contracts/bridge/interfaces/IL1ERC20Bridge.sol @@ -1,13 +1,15 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {IL1SharedBridge} from "./IL1SharedBridge.sol"; +import {IL1Nullifier} from "./IL1Nullifier.sol"; +import {IL1NativeTokenVault} from "../ntv/IL1NativeTokenVault.sol"; +import {IL1AssetRouter} from "../asset-router/IL1AssetRouter.sol"; /// @title L1 Bridge contract legacy interface /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -/// @notice Legacy Bridge interface before hyperchain migration, used for backward compatibility with zkSync Era +/// @notice Legacy Bridge interface before ZK chain migration, used for backward compatibility with ZKsync Era interface IL1ERC20Bridge { event DepositInitiated( bytes32 indexed l2DepositTxHash, @@ -60,7 +62,11 @@ interface IL1ERC20Bridge { function l2TokenAddress(address _l1Token) external view returns (address); - function SHARED_BRIDGE() external view returns (IL1SharedBridge); + function L1_NULLIFIER() external view returns (IL1Nullifier); + + function L1_ASSET_ROUTER() external view returns (IL1AssetRouter); + + function L1_NATIVE_TOKEN_VAULT() external view returns (IL1NativeTokenVault); function l2TokenBeacon() external view returns (address); @@ -70,7 +76,5 @@ interface IL1ERC20Bridge { address _account, address _l1Token, bytes32 _depositL2TxHash - ) external returns (uint256 amount); - - function transferTokenToSharedBridge(address _token) external; + ) external view returns (uint256 amount); } diff --git a/l1-contracts/contracts/bridge/interfaces/IL1Nullifier.sol b/l1-contracts/contracts/bridge/interfaces/IL1Nullifier.sol new file mode 100644 index 000000000..61bf38516 --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IL1Nullifier.sol @@ -0,0 +1,111 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IBridgehub} from "../../bridgehub/IBridgehub.sol"; +import {IL1NativeTokenVault} from "../ntv/IL1NativeTokenVault.sol"; +import {IL1ERC20Bridge} from "./IL1ERC20Bridge.sol"; + +/// @param chainId The 
chain ID of the transaction to check. +/// @param l2BatchNumber The L2 batch number where the withdrawal was processed. +/// @param l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message. +/// @param l2Sender The address of the message sender on L2 (base token system contract address or asset handler) +/// @param l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent. +/// @param message The L2 withdraw data, stored in an L2 -> L1 message. +/// @param merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization. +struct FinalizeL1DepositParams { + uint256 chainId; + uint256 l2BatchNumber; + uint256 l2MessageIndex; + address l2Sender; + uint16 l2TxNumberInBatch; + bytes message; + bytes32[] merkleProof; +} + +/// @title L1 Bridge contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IL1Nullifier { + event BridgehubDepositFinalized( + uint256 indexed chainId, + bytes32 indexed txDataHash, + bytes32 indexed l2DepositTxHash + ); + + function isWithdrawalFinalized( + uint256 _chainId, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex + ) external view returns (bool); + + function claimFailedDepositLegacyErc20Bridge( + address _depositSender, + address _l1Token, + uint256 _amount, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external; + + function claimFailedDeposit( + uint256 _chainId, + address _depositSender, + address _l1Token, + uint256 _amount, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external; + + function finalizeDeposit(FinalizeL1DepositParams calldata _finalizeWithdrawalParams) external; + + function BRIDGE_HUB() external view returns (IBridgehub); + + function legacyBridge() external view returns 
(IL1ERC20Bridge); + + function depositHappened(uint256 _chainId, bytes32 _l2TxHash) external view returns (bytes32); + + function bridgehubConfirmL2TransactionForwarded(uint256 _chainId, bytes32 _txDataHash, bytes32 _txHash) external; + + function l1NativeTokenVault() external view returns (IL1NativeTokenVault); + + function setL1NativeTokenVault(IL1NativeTokenVault _nativeTokenVault) external; + + function setL1AssetRouter(address _l1AssetRouter) external; + + function chainBalance(uint256 _chainId, address _token) external view returns (uint256); + + function l2BridgeAddress(uint256 _chainId) external view returns (address); + + function transferTokenToNTV(address _token) external; + + function nullifyChainBalanceByNTV(uint256 _chainId, address _token) external; + + /// @dev Withdraw funds from the initiated deposit, that failed when finalizing on L2. + /// @param _chainId The ZK chain id to which deposit was initiated. + /// @param _depositSender The address of the entity that initiated the deposit. + /// @param _assetId The unique identifier of the deposited L1 token. + /// @param _assetData The encoded transfer data, which includes both the deposit amount and the address of the L2 receiver. Might include extra information. + /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization. + /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed. + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message. + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent. + /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization. + /// @dev Processes claims of failed deposit, whether they originated from the legacy bridge or the current system. 
+ function bridgeRecoverFailedTransfer( + uint256 _chainId, + address _depositSender, + bytes32 _assetId, + bytes memory _assetData, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof + ) external; +} diff --git a/l1-contracts/contracts/bridge/interfaces/IL1SharedBridge.sol b/l1-contracts/contracts/bridge/interfaces/IL1SharedBridge.sol deleted file mode 100644 index ef7e74165..000000000 --- a/l1-contracts/contracts/bridge/interfaces/IL1SharedBridge.sol +++ /dev/null @@ -1,169 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {L2TransactionRequestTwoBridgesInner} from "../../bridgehub/IBridgehub.sol"; -import {IBridgehub} from "../../bridgehub/IBridgehub.sol"; -import {IL1ERC20Bridge} from "./IL1ERC20Bridge.sol"; - -/// @title L1 Bridge contract interface -/// @author Matter Labs -/// @custom:security-contact security@matterlabs.dev -interface IL1SharedBridge { - /// @notice pendingAdmin is changed - /// @dev Also emitted when new admin is accepted and in this case, `newPendingAdmin` would be zero address - event NewPendingAdmin(address indexed oldPendingAdmin, address indexed newPendingAdmin); - - /// @notice Admin changed - event NewAdmin(address indexed oldAdmin, address indexed newAdmin); - - event LegacyDepositInitiated( - uint256 indexed chainId, - bytes32 indexed l2DepositTxHash, - address indexed from, - address to, - address l1Token, - uint256 amount - ); - - event BridgehubDepositInitiated( - uint256 indexed chainId, - bytes32 indexed txDataHash, - address indexed from, - address to, - address l1Token, - uint256 amount - ); - - event BridgehubDepositBaseTokenInitiated( - uint256 indexed chainId, - address indexed from, - address l1Token, - uint256 amount - ); - - event BridgehubDepositFinalized( - uint256 indexed chainId, - bytes32 indexed txDataHash, - bytes32 indexed l2DepositTxHash - ); - - event WithdrawalFinalizedSharedBridge( - uint256 
indexed chainId, - address indexed to, - address indexed l1Token, - uint256 amount - ); - - event ClaimedFailedDepositSharedBridge( - uint256 indexed chainId, - address indexed to, - address indexed l1Token, - uint256 amount - ); - - function isWithdrawalFinalized( - uint256 _chainId, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex - ) external view returns (bool); - - function depositLegacyErc20Bridge( - address _msgSender, - address _l2Receiver, - address _l1Token, - uint256 _amount, - uint256 _l2TxGasLimit, - uint256 _l2TxGasPerPubdataByte, - address _refundRecipient - ) external payable returns (bytes32 txHash); - - function claimFailedDepositLegacyErc20Bridge( - address _depositSender, - address _l1Token, - uint256 _amount, - bytes32 _l2TxHash, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes32[] calldata _merkleProof - ) external; - - function claimFailedDeposit( - uint256 _chainId, - address _depositSender, - address _l1Token, - uint256 _amount, - bytes32 _l2TxHash, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes32[] calldata _merkleProof - ) external; - - function finalizeWithdrawalLegacyErc20Bridge( - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external returns (address l1Receiver, address l1Token, uint256 amount); - - function finalizeWithdrawal( - uint256 _chainId, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external; - - function setEraPostDiamondUpgradeFirstBatch(uint256 _eraPostDiamondUpgradeFirstBatch) external; - - function setEraPostLegacyBridgeUpgradeFirstBatch(uint256 _eraPostLegacyBridgeUpgradeFirstBatch) external; - - function setEraLegacyBridgeLastDepositTime( - uint256 _eraLegacyBridgeLastDepositBatch, - uint256 _eraLegacyBridgeLastDepositTxNumber - ) 
external; - - function L1_WETH_TOKEN() external view returns (address); - - function BRIDGE_HUB() external view returns (IBridgehub); - - function legacyBridge() external view returns (IL1ERC20Bridge); - - function l2BridgeAddress(uint256 _chainId) external view returns (address); - - function depositHappened(uint256 _chainId, bytes32 _l2TxHash) external view returns (bytes32); - - /// data is abi encoded : - /// address _l1Token, - /// uint256 _amount, - /// address _l2Receiver - function bridgehubDeposit( - uint256 _chainId, - address _prevMsgSender, - uint256 _l2Value, - bytes calldata _data - ) external payable returns (L2TransactionRequestTwoBridgesInner memory request); - - function bridgehubDepositBaseToken( - uint256 _chainId, - address _prevMsgSender, - address _l1Token, - uint256 _amount - ) external payable; - - function bridgehubConfirmL2Transaction(uint256 _chainId, bytes32 _txDataHash, bytes32 _txHash) external; - - function receiveEth(uint256 _chainId) external payable; - - /// @notice Starts the transfer of admin rights. Only the current admin can propose a new pending one. - /// @notice New admin can accept admin rights by calling `acceptAdmin` function. - /// @param _newPendingAdmin Address of the new admin - function setPendingAdmin(address _newPendingAdmin) external; - - /// @notice Accepts transfer of admin rights. Only pending admin can accept the role. 
- function acceptAdmin() external; -} diff --git a/l1-contracts/contracts/bridge/interfaces/IL1SharedBridgeLegacy.sol b/l1-contracts/contracts/bridge/interfaces/IL1SharedBridgeLegacy.sol new file mode 100644 index 000000000..43fca83a3 --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IL1SharedBridgeLegacy.sol @@ -0,0 +1,10 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @title L1 Bridge contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IL1SharedBridgeLegacy { + function l2BridgeAddress(uint256 _chainId) external view returns (address); +} diff --git a/l1-contracts/contracts/bridge/interfaces/IL2Bridge.sol b/l1-contracts/contracts/bridge/interfaces/IL2Bridge.sol index eb21b4f25..7fe7b7a97 100644 --- a/l1-contracts/contracts/bridge/interfaces/IL2Bridge.sol +++ b/l1-contracts/contracts/bridge/interfaces/IL2Bridge.sol @@ -1,22 +1,15 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; /// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev interface IL2Bridge { - function finalizeDeposit( - address _l1Sender, - address _l2Receiver, - address _l1Token, - uint256 _amount, - bytes calldata _data - ) external; - - function withdraw(address _l1Receiver, address _l2Token, uint256 _amount) external; + function withdraw(bytes32 _assetId, bytes memory _assetData) external; - function l1TokenAddress(address _l2Token) external view returns (address); - - function l2TokenAddress(address _l1Token) external view returns (address); + function finalizeDeposit(bytes32 _assetId, bytes calldata _transferData) external; function l1Bridge() external view returns (address); + + function setAssetHandlerAddress(bytes32 _assetId, address _assetAddress) external; } diff --git a/l1-contracts/contracts/bridge/interfaces/IL2SharedBridgeLegacy.sol b/l1-contracts/contracts/bridge/interfaces/IL2SharedBridgeLegacy.sol new file mode 100644 index 000000000..71c7a46c5 --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IL2SharedBridgeLegacy.sol @@ -0,0 +1,32 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IL2SharedBridgeLegacy { + event FinalizeDeposit( + address indexed l1Sender, + address indexed l2Receiver, + address indexed l2Token, + uint256 amount + ); + + function l2TokenBeacon() external returns (UpgradeableBeacon); + + function withdraw(address _l1Receiver, address _l2Token, uint256 _amount) external; + + function l1TokenAddress(address _l2Token) external view returns (address); + + function l2TokenAddress(address _l1Token) external view returns (address); + + function l1Bridge() external view returns (address); + + function l1SharedBridge() external view returns (address); + + function deployBeaconProxy(bytes32 
_salt) external returns (address); + + function sendMessageToL1(bytes calldata _message) external returns (bytes32); +} diff --git a/l1-contracts/contracts/bridge/interfaces/IL2SharedBridgeLegacyFunctions.sol b/l1-contracts/contracts/bridge/interfaces/IL2SharedBridgeLegacyFunctions.sol new file mode 100644 index 000000000..42c8f7759 --- /dev/null +++ b/l1-contracts/contracts/bridge/interfaces/IL2SharedBridgeLegacyFunctions.sol @@ -0,0 +1,28 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; + +/// @author Matter Labs +interface IL2SharedBridgeLegacyFunctions { + event FinalizeDeposit( + address indexed l1Sender, + address indexed l2Receiver, + address indexed l2Token, + uint256 amount + ); + + event WithdrawalInitiated( + address indexed l2Sender, + address indexed l1Receiver, + address indexed l2Token, + uint256 amount + ); + + function finalizeDeposit( + address _l1Sender, + address _l2Receiver, + address _l1Token, + uint256 _amount, + bytes calldata _data + ) external; +} diff --git a/l2-contracts/contracts/bridge/interfaces/IL2WrappedBaseToken.sol b/l1-contracts/contracts/bridge/interfaces/IL2WrappedBaseToken.sol similarity index 65% rename from l2-contracts/contracts/bridge/interfaces/IL2WrappedBaseToken.sol rename to l1-contracts/contracts/bridge/interfaces/IL2WrappedBaseToken.sol index 693aa139a..ae7e1a916 100644 --- a/l2-contracts/contracts/bridge/interfaces/IL2WrappedBaseToken.sol +++ b/l1-contracts/contracts/bridge/interfaces/IL2WrappedBaseToken.sol @@ -1,5 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
// SPDX-License-Identifier: MIT

pragma solidity 0.8.24;

import {IL1Nullifier} from "../interfaces/IL1Nullifier.sol";
import {INativeTokenVault} from "./INativeTokenVault.sol";
import {IL1AssetDeploymentTracker} from "../interfaces/IL1AssetDeploymentTracker.sol";

/// @title L1 Native token vault contract interface
/// @author Matter Labs
/// @custom:security-contact security@matterlabs.dev
/// @notice The NTV is an Asset Handler for the L1AssetRouter to handle native tokens
interface IL1NativeTokenVault is INativeTokenVault, IL1AssetDeploymentTracker {
    /// @notice The L1Nullifier contract
    function L1_NULLIFIER() external view returns (IL1Nullifier);

    /// @notice Returns the total number of specific tokens locked for some chain
    function chainBalance(uint256 _chainId, bytes32 _assetId) external view returns (uint256);

    /// @notice Registers ETH token
    function registerEthToken() external;

    /// @notice Emitted when the beacon for bridged tokens is updated.
    /// NOTE(review): the parameter name says "l2TokenBeacon" although this is the L1 interface — confirm intent.
    event TokenBeaconUpdated(address indexed l2TokenBeacon);
}

// SPDX-License-Identifier: MIT

pragma solidity ^0.8.20;

import {INativeTokenVault} from "./INativeTokenVault.sol";

/// @author Matter Labs
/// @custom:security-contact security@matterlabs.dev
/// @notice L2 flavor of the native token vault; adds legacy-compatible events and the
/// L1 -> L2 token address lookup on top of the shared INativeTokenVault interface.
interface IL2NativeTokenVault is INativeTokenVault {
    /// @notice Emitted when an L1 -> L2 deposit is finalized (legacy-compatible event shape).
    event FinalizeDeposit(
        address indexed l1Sender,
        address indexed l2Receiver,
        address indexed l2Token,
        uint256 amount
    );

    /// @notice Emitted when an L2 -> L1 withdrawal is initiated (legacy-compatible event shape).
    event WithdrawalInitiated(
        address indexed l2Sender,
        address indexed l1Receiver,
        address indexed l2Token,
        uint256 amount
    );

    /// @notice Emitted when the beacon used for bridged-token proxies is updated.
    event L2TokenBeaconUpdated(address indexed l2TokenBeacon, bytes32 indexed l2TokenProxyBytecodeHash);

    /// @notice Returns the L2 token address corresponding to an L1 token counterpart.
    function l2TokenAddress(address _l1Token) external view returns (address);
}

// SPDX-License-Identifier: MIT

pragma solidity 0.8.24;

import {IAssetRouterBase} from "../asset-router/IAssetRouterBase.sol";

/// @title Base Native token vault contract interface
/// @author Matter Labs
/// @custom:security-contact security@matterlabs.dev
/// @notice The NTV is an Asset Handler for the L1AssetRouter to handle native tokens
interface INativeTokenVault {
    /// @notice Emitted when the beacon for bridged-token proxies is updated.
    event BridgedTokenBeaconUpdated(address bridgedTokenBeacon, bytes32 bridgedTokenProxyBytecodeHash);

    /// @notice The Weth token address
    function WETH_TOKEN() external view returns (address);

    /// @notice The AssetRouter contract
    function ASSET_ROUTER() external view returns (IAssetRouterBase);

    /// @notice The chain ID of the L1 chain
    function L1_CHAIN_ID() external view returns (uint256);

    /// @notice Returns the chain ID of the origin chain for a given asset ID
    function originChainId(bytes32 assetId) external view returns (uint256);

    /// @notice Registers tokens within the NTV.
    /// @dev The goal is to allow bridging native tokens automatically, by registering them on the fly.
    /// @notice Allows the bridge to register a token address for the vault.
    /// @notice No access control is ok, since the bridging of tokens should be permissionless. This requires permissionless registration.
    function registerToken(address _l1Token) external;

    /// @notice Ensures that the native token is registered with the NTV.
    /// @dev This function is used to ensure that the token is registered with the NTV.
    function ensureTokenIsRegistered(address _nativeToken) external;

    /// @notice Used to get the the ERC20 data for a token
    function getERC20Getters(address _token, uint256 _originChainId) external view returns (bytes memory);

    /// @notice Used to get the token address of an assetId
    function tokenAddress(bytes32 assetId) external view returns (address);

    /// @notice Used to get the assetId of a token
    function assetId(address token) external view returns (bytes32);

    /// @notice Used to get the expected bridged token address corresponding to its native counterpart
    function calculateCreate2TokenAddress(uint256 _originChainId, address _originToken) external view returns (address);
}
// SPDX-License-Identifier: MIT

pragma solidity 0.8.24;

// solhint-disable reason-string, gas-custom-errors

import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol";
import {IBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/IBeacon.sol";
import {Create2} from "@openzeppelin/contracts-v4/utils/Create2.sol";

import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol";
import {SafeERC20} from "@openzeppelin/contracts-v4/token/ERC20/utils/SafeERC20.sol";

import {IL1NativeTokenVault} from "./IL1NativeTokenVault.sol";
import {INativeTokenVault} from "./INativeTokenVault.sol";
import {NativeTokenVault} from "./NativeTokenVault.sol";

import {IL1AssetHandler} from "../interfaces/IL1AssetHandler.sol";
import {IL1Nullifier} from "../interfaces/IL1Nullifier.sol";
import {IBridgedStandardToken} from "../interfaces/IBridgedStandardToken.sol";
import {IL1AssetRouter} from "../asset-router/IL1AssetRouter.sol";

import {ETH_TOKEN_ADDRESS} from "../../common/Config.sol";
import {L2_NATIVE_TOKEN_VAULT_ADDR} from "../../common/L2ContractAddresses.sol";
import {DataEncoding} from "../../common/libraries/DataEncoding.sol";

import {Unauthorized, ZeroAddress, NoFundsTransferred, InsufficientChainBalance, WithdrawFailed, OriginChainIdNotFound} from "../../common/L1ContractErrors.sol";

/// @author Matter Labs
/// @custom:security-contact security@matterlabs.dev
/// @dev Vault holding L1 native ETH and ERC20 tokens bridged into the ZK chains.
/// @dev Designed for use with a proxy for upgradability.
contract L1NativeTokenVault is IL1NativeTokenVault, IL1AssetHandler, NativeTokenVault {
    using SafeERC20 for IERC20;

    /// @dev L1 nullifier contract that handles legacy functions & finalize withdrawal, confirm l2 tx mappings
    IL1Nullifier public immutable override L1_NULLIFIER;

    /// @dev Era's chainID
    uint256 public immutable ERA_CHAIN_ID;

    /// @dev Maps token balances for each chain to prevent unauthorized spending across ZK chains.
    /// This serves as a security measure until hyperbridging is implemented.
    /// NOTE: this function may be removed in the future, don't rely on it!
    mapping(uint256 chainId => mapping(bytes32 assetId => uint256 balance)) public chainBalance;

    /// @dev Contract is expected to be used as proxy implementation.
    /// @dev Initialize the implementation to prevent Parity hack.
    /// @param _l1WethAddress Address of WETH on deployed chain
    /// @param _l1AssetRouter Address of Asset Router on L1.
    /// @param _eraChainId ID of Era.
    /// @param _l1Nullifier Address of the nullifier contract, which handles transaction progress between L1 and ZK chains.
    constructor(
        address _l1WethAddress,
        address _l1AssetRouter,
        uint256 _eraChainId,
        IL1Nullifier _l1Nullifier
    )
        NativeTokenVault(
            _l1WethAddress,
            _l1AssetRouter,
            // The base asset of the L1 NTV is ETH, keyed by the current (L1) chain id.
            DataEncoding.encodeNTVAssetId(block.chainid, ETH_TOKEN_ADDRESS),
            block.chainid
        )
    {
        ERA_CHAIN_ID = _eraChainId;
        L1_NULLIFIER = _l1Nullifier;
    }

    /// @dev Accepts ether only from the contract that was the shared Bridge.
    receive() external payable {
        if ((address(L1_NULLIFIER) != msg.sender) && (address(ASSET_ROUTER) != msg.sender)) {
            revert Unauthorized(msg.sender);
        }
    }

    /// @dev Initializes a contract for later use. Expected to be used in the proxy
    /// @param _owner Address which can change pause / unpause the NTV
    /// implementation. The owner is the Governor and separate from the ProxyAdmin from now on, so that the Governor can call the bridge.
    /// @param _bridgedTokenBeacon Beacon contract used for the bridged-token proxies.
    function initialize(address _owner, address _bridgedTokenBeacon) external initializer {
        if (_owner == address(0)) {
            revert ZeroAddress();
        }
        bridgedTokenBeacon = IBeacon(_bridgedTokenBeacon);
        _transferOwnership(_owner);
    }

    /// @inheritdoc IL1NativeTokenVault
    function registerEthToken() external {
        // ETH has no code, so the regular `_registerToken` code-length check cannot be used.
        _unsafeRegisterNativeToken(ETH_TOKEN_ADDRESS);
    }

    /// @notice Transfers tokens from shared bridge as part of the migration process.
    /// The shared bridge becomes the L1Nullifier contract.
    /// @dev Both ETH and ERC20 tokens can be transferred. Exhausts balance of shared bridge after the first call.
    /// @dev Calling second time for the same token will revert.
    /// @param _token The address of token to be transferred (address(1) for ether and contract address for ERC20).
    function transferFundsFromSharedBridge(address _token) external {
        ensureTokenIsRegistered(_token);
        if (_token == ETH_TOKEN_ADDRESS) {
            uint256 balanceBefore = address(this).balance;
            L1_NULLIFIER.transferTokenToNTV(_token);
            uint256 balanceAfter = address(this).balance;
            // The nullifier must have actually sent ether; a no-op transfer is treated as an error.
            if (balanceAfter <= balanceBefore) {
                revert NoFundsTransferred();
            }
        } else {
            uint256 balanceBefore = IERC20(_token).balanceOf(address(this));
            uint256 nullifierChainBalance = IERC20(_token).balanceOf(address(L1_NULLIFIER));
            require(nullifierChainBalance > 0, "NTV: 0 amount to transfer");
            L1_NULLIFIER.transferTokenToNTV(_token);
            uint256 balanceAfter = IERC20(_token).balanceOf(address(this));
            // `>=` (not `==`): guards against under-delivery while tolerating extra inflows.
            require(balanceAfter - balanceBefore >= nullifierChainBalance, "NTV: wrong amount transferred");
        }
    }

    /// @notice Updates chain token balance within NTV to account for tokens transferred from the shared bridge (part of the migration process).
    /// @dev Clears chain balance on the shared bridge after the first call. Subsequent calls will not affect the state.
    /// @param _token The address of token to be transferred (address(1) for ether and contract address for ERC20).
    /// @param _targetChainId The chain ID of the corresponding ZK chain.
    function updateChainBalancesFromSharedBridge(address _token, uint256 _targetChainId) external {
        uint256 nullifierChainBalance = L1_NULLIFIER.chainBalance(_targetChainId, _token);
        bytes32 assetId = DataEncoding.encodeNTVAssetId(block.chainid, _token);
        chainBalance[_targetChainId][assetId] = chainBalance[_targetChainId][assetId] + nullifierChainBalance;
        // Legacy shared-bridge tokens are by definition native to L1.
        originChainId[assetId] = block.chainid;
        // Zero the balance on the nullifier so repeated calls are no-ops.
        L1_NULLIFIER.nullifyChainBalanceByNTV(_targetChainId, _token);
    }

    /// @notice Used to register the Asset Handler asset in L2 AssetRouter.
    /// @param _assetHandlerAddressOnCounterpart the address of the asset handler on the counterpart chain.
    function bridgeCheckCounterpartAddress(
        uint256,
        bytes32,
        address,
        address _assetHandlerAddressOnCounterpart
    ) external view override onlyAssetRouter {
        // The only accepted counterpart handler is the fixed L2 NTV system address.
        require(_assetHandlerAddressOnCounterpart == L2_NATIVE_TOKEN_VAULT_ADDR, "NTV: wrong counterpart");
    }

    /// @dev Resolves the origin chain of `_assetId`, falling back to balance-based heuristics
    /// for tokens registered before origin tracking existed. Returns 0 when undeterminable.
    function _getOriginChainId(bytes32 _assetId) internal view returns (uint256) {
        uint256 chainId = originChainId[_assetId];
        if (chainId != 0) {
            return chainId;
        } else {
            address token = tokenAddress[_assetId];
            if (token == ETH_TOKEN_ADDRESS) {
                return block.chainid;
            } else if (IERC20(token).balanceOf(address(this)) > 0) {
                // A positive vault balance implies the token was deposited natively on L1.
                return block.chainid;
            } else if (IERC20(token).balanceOf(address(L1_NULLIFIER)) > 0) {
                // Legacy deposits may still be held by the old shared bridge (now the nullifier).
                return block.chainid;
            } else {
                return 0;
            }
        }
    }

    /*//////////////////////////////////////////////////////////////
                Start transaction Functions
    //////////////////////////////////////////////////////////////*/

    /// @dev L1 override: first lets the asset router pull the funds into the NTV
    /// (covers the legacy allowance-on-router flow), then defers to the base burn logic.
    function _bridgeBurnNativeToken(
        uint256 _chainId,
        bytes32 _assetId,
        address _originalCaller,
        // solhint-disable-next-line no-unused-vars
        bool _depositChecked,
        bytes calldata _data
    ) internal override returns (bytes memory _bridgeMintData) {
        uint256 _depositAmount;
        (_depositAmount, ) = abi.decode(_data, (uint256, address));
        bool depositChecked = IL1AssetRouter(address(ASSET_ROUTER)).transferFundsToNTV(
            _assetId,
            _depositAmount,
            _originalCaller
        );
        _bridgeMintData = super._bridgeBurnNativeToken({
            _chainId: _chainId,
            _assetId: _assetId,
            _originalCaller: _originalCaller,
            _depositChecked: depositChecked,
            _data: _data
        });
    }

    /*//////////////////////////////////////////////////////////////
                L1 SPECIFIC FUNCTIONS
    //////////////////////////////////////////////////////////////*/

    /// @inheritdoc IL1AssetHandler
    function bridgeRecoverFailedTransfer(
        uint256 _chainId,
        bytes32 _assetId,
        address _depositSender,
        bytes calldata _data
    ) external payable override onlyAssetRouter whenNotPaused {
        (uint256 _amount, ) = abi.decode(_data, (uint256, address));
        address l1Token = tokenAddress[_assetId];
        if (_amount == 0) {
            revert NoFundsTransferred();
        }

        _handleChainBalanceDecrease(_chainId, _assetId, _amount, false);

        if (l1Token == ETH_TOKEN_ADDRESS) {
            bool callSuccess;
            // Low-level assembly call, to avoid any memory copying (save gas)
            assembly {
                callSuccess := call(gas(), _depositSender, _amount, 0, 0, 0, 0)
            }
            require(callSuccess, "NTV: claimFailedDeposit failed, no funds or cannot transfer to receiver");
        } else {
            // NOTE: local `originChainId` shadows the `originChainId` state mapping inside this block.
            uint256 originChainId = _getOriginChainId(_assetId);
            if (originChainId == block.chainid) {
                // Token is native to L1: return the locked funds.
                IERC20(l1Token).safeTransfer(_depositSender, _amount);
            } else if (originChainId != 0) {
                // Token is bridged from another chain: re-mint the burned amount.
                IBridgedStandardToken(l1Token).bridgeMint(_depositSender, _amount);
            } else {
                revert OriginChainIdNotFound();
            }
            // Note we don't allow weth deposits anymore, but there might be legacy weth deposits.
            // until we add Weth bridging capabilities, we don't wrap/unwrap weth to ether.
        }
    }

    /*//////////////////////////////////////////////////////////////
                INTERNAL & HELPER FUNCTIONS
    //////////////////////////////////////////////////////////////*/

    // get the computed address before the contract DeployWithCreate2 deployed using Bytecode of contract DeployWithCreate2 and salt specified by the sender
    function calculateCreate2TokenAddress(
        uint256 _originChainId,
        address _l1Token
    ) public view override(INativeTokenVault, NativeTokenVault) returns (address) {
        bytes32 salt = _getCreate2Salt(_originChainId, _l1Token);
        return
            Create2.computeAddress(
                salt,
                // Init-code hash of a BeaconProxy pointing at `bridgedTokenBeacon` with empty init data.
                keccak256(abi.encodePacked(type(BeaconProxy).creationCode, abi.encode(bridgedTokenBeacon, "")))
            );
    }

    /// @notice Transfers tokens from the depositor address to the smart contract address.
    /// @param _from The address of the depositor.
    /// @param _token The ERC20 token to be transferred.
    /// @param _amount The amount to be transferred.
    /// @return The difference between the contract balance before and after the transferring of funds.
    function _depositFunds(address _from, IERC20 _token, uint256 _amount) internal override returns (uint256) {
        address from = _from;
        // in the legacy scenario the SharedBridge = L1Nullifier was granting the allowance, we have to transfer from them instead of the user
        if (
            _token.allowance(address(ASSET_ROUTER), address(this)) >= _amount &&
            _token.allowance(_from, address(this)) < _amount
        ) {
            from = address(ASSET_ROUTER);
        }
        return super._depositFunds(from, _token, _amount);
    }

    /// @dev Pays out `_amount` of the asset: raw ether for the base asset, ERC20 transfer otherwise.
    function _withdrawFunds(bytes32 _assetId, address _to, address _token, uint256 _amount) internal override {
        if (_assetId == BASE_TOKEN_ASSET_ID) {
            bool callSuccess;
            // Low-level assembly call, to avoid any memory copying (save gas)
            assembly {
                callSuccess := call(gas(), _to, _amount, 0, 0, 0, 0)
            }
            if (!callSuccess) {
                revert WithdrawFailed();
            }
        } else {
            // Withdraw funds
            IERC20(_token).safeTransfer(_to, _amount);
        }
    }

    /// @dev Deploys the bridged-token BeaconProxy deterministically via CREATE2 with the given salt.
    function _deployBeaconProxy(bytes32 _salt) internal override returns (BeaconProxy proxy) {
        // Use CREATE2 to deploy the BeaconProxy
        address proxyAddress = Create2.deploy(
            0,
            _salt,
            abi.encodePacked(type(BeaconProxy).creationCode, abi.encode(bridgedTokenBeacon, ""))
        );
        return BeaconProxy(payable(proxyAddress));
    }

    /// @dev Credits `_amount` to the per-chain balance; bridged assets returning to their
    /// origin chain (`originChainId == _chainId`) are not tracked.
    function _handleChainBalanceIncrease(
        uint256 _chainId,
        bytes32 _assetId,
        uint256 _amount,
        bool _isNative
    ) internal override {
        if ((_isNative) || (originChainId[_assetId] != _chainId)) {
            chainBalance[_chainId][_assetId] += _amount;
        }
    }

    /// @dev Debits `_amount` from the per-chain balance under the same condition as the
    /// increase path; reverts if the chain's tracked balance is insufficient.
    function _handleChainBalanceDecrease(
        uint256 _chainId,
        bytes32 _assetId,
        uint256 _amount,
        bool _isNative
    ) internal override {
        if ((_isNative) || (originChainId[_assetId] != _chainId)) {
            // Check that the chain has sufficient balance
            if (chainBalance[_chainId][_assetId] < _amount) {
                revert InsufficientChainBalance();
            }
            chainBalance[_chainId][_assetId] -= _amount;
        }
    }
}
// SPDX-License-Identifier: MIT

pragma solidity 0.8.24;

import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol";
import {IBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/IBeacon.sol";
import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol";

import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol";
import {SafeERC20} from "@openzeppelin/contracts-v4/token/ERC20/utils/SafeERC20.sol";

import {INativeTokenVault} from "./INativeTokenVault.sol";
import {IL2NativeTokenVault} from "./IL2NativeTokenVault.sol";
import {NativeTokenVault} from "./NativeTokenVault.sol";

import {IL2SharedBridgeLegacy} from "../interfaces/IL2SharedBridgeLegacy.sol";
import {BridgedStandardERC20} from "../BridgedStandardERC20.sol";

import {DEPLOYER_SYSTEM_CONTRACT, L2_ASSET_ROUTER_ADDR} from "../../common/L2ContractAddresses.sol";
import {L2ContractHelper, IContractDeployer} from "../../common/libraries/L2ContractHelper.sol";

import {SystemContractsCaller} from "../../common/libraries/SystemContractsCaller.sol";
import {DataEncoding} from "../../common/libraries/DataEncoding.sol";

import {EmptyAddress, EmptyBytes32, AddressMismatch, DeployFailed, AssetIdNotSupported} from "../../common/L1ContractErrors.sol";

/// @author Matter Labs
/// @custom:security-contact security@matterlabs.dev
/// @notice The "default" bridge implementation for the ERC20 tokens. Note, that it does not
/// support any custom token logic, i.e. rebase tokens' functionality is not supported.
contract L2NativeTokenVault is IL2NativeTokenVault, NativeTokenVault {
    using SafeERC20 for IERC20;

    /// @dev The legacy L2 shared bridge (address(0) on chains deployed without one).
    IL2SharedBridgeLegacy public immutable L2_LEGACY_SHARED_BRIDGE;

    /// @dev Bytecode hash of the proxy for tokens deployed by the bridge.
    bytes32 internal l2TokenProxyBytecodeHash;

    /// @notice Initializes the bridge contract for later use.
    /// @param _l1ChainId The L1 chain id differs between mainnet and testnets.
    /// @param _l2TokenProxyBytecodeHash The bytecode hash of the proxy for tokens deployed by the bridge.
    /// @param _aliasedOwner The address of the governor contract.
    /// @param _legacySharedBridge The address of the L2 legacy shared bridge.
    /// @param _bridgedTokenBeacon The address of the L2 token beacon for legacy chains.
    /// @param _contractsDeployedAlready Ensures beacon proxy for standard ERC20 has not been deployed.
    /// @param _wethToken Address of WETH on deployed chain
    /// @param _baseTokenAssetId The asset id of the chain's base token.
    constructor(
        uint256 _l1ChainId,
        address _aliasedOwner,
        bytes32 _l2TokenProxyBytecodeHash,
        address _legacySharedBridge,
        address _bridgedTokenBeacon,
        bool _contractsDeployedAlready,
        address _wethToken,
        bytes32 _baseTokenAssetId
    ) NativeTokenVault(_wethToken, L2_ASSET_ROUTER_ADDR, _baseTokenAssetId, _l1ChainId) {
        L2_LEGACY_SHARED_BRIDGE = IL2SharedBridgeLegacy(_legacySharedBridge);

        if (_l2TokenProxyBytecodeHash == bytes32(0)) {
            revert EmptyBytes32();
        }
        if (_aliasedOwner == address(0)) {
            revert EmptyAddress();
        }

        l2TokenProxyBytecodeHash = _l2TokenProxyBytecodeHash;
        _transferOwnership(_aliasedOwner);

        if (_contractsDeployedAlready) {
            if (_bridgedTokenBeacon == address(0)) {
                revert EmptyAddress();
            }
            bridgedTokenBeacon = IBeacon(_bridgedTokenBeacon);
        } else {
            // Fresh chain: deploy the standard token implementation and its beacon deterministically.
            address l2StandardToken = address(new BridgedStandardERC20{salt: bytes32(0)}());

            UpgradeableBeacon tokenBeacon = new UpgradeableBeacon{salt: bytes32(0)}(l2StandardToken);

            tokenBeacon.transferOwnership(owner());
            bridgedTokenBeacon = IBeacon(address(tokenBeacon));
            emit L2TokenBeaconUpdated(address(bridgedTokenBeacon), _l2TokenProxyBytecodeHash);
        }
    }

    /// @notice Sets the legacy token asset ID for the given L2 token address.
    /// @dev Permissionless by design: the mapping is derived solely from the legacy bridge's
    /// own `l1TokenAddress` registry, so callers cannot inject arbitrary data.
    /// @param _l2TokenAddress The L2 token that was deployed by the legacy shared bridge.
    function setLegacyTokenAssetId(address _l2TokenAddress) public {
        address l1TokenAddress = L2_LEGACY_SHARED_BRIDGE.l1TokenAddress(_l2TokenAddress);
        // FIX: a token unknown to the legacy bridge resolves to address(0). Without this guard the
        // function would register `encodeNTVAssetId(L1_CHAIN_ID, address(0))` pointing at an
        // arbitrary caller-supplied token, corrupting the asset-id mapping.
        if (l1TokenAddress == address(0)) {
            revert EmptyAddress();
        }
        bytes32 newAssetId = DataEncoding.encodeNTVAssetId(L1_CHAIN_ID, l1TokenAddress);
        tokenAddress[newAssetId] = _l2TokenAddress;
        assetId[_l2TokenAddress] = newAssetId;
        // Legacy shared-bridge tokens are always native to L1.
        originChainId[newAssetId] = L1_CHAIN_ID;
    }

    /// @notice Ensures that the token is deployed.
    /// @param _originChainId The chain ID of the origin chain.
    /// @param _assetId The asset ID.
    /// @param _originToken The origin token address.
    /// @param _erc20Data The ERC20 data.
    /// @return expectedToken The token address.
    function _ensureTokenDeployed(
        uint256 _originChainId,
        bytes32 _assetId,
        address _originToken,
        bytes memory _erc20Data
    ) internal override returns (address expectedToken) {
        expectedToken = _assetIdCheck(_originChainId, _assetId, _originToken);
        address l1LegacyToken;
        if (address(L2_LEGACY_SHARED_BRIDGE) != address(0)) {
            l1LegacyToken = L2_LEGACY_SHARED_BRIDGE.l1TokenAddress(expectedToken);
        }

        if (l1LegacyToken != address(0)) {
            /// token is a legacy token, no need to deploy
            if (l1LegacyToken != _originToken) {
                revert AddressMismatch(_originToken, l1LegacyToken);
            }
            tokenAddress[_assetId] = expectedToken;
            assetId[expectedToken] = _assetId;
        } else {
            super._ensureTokenDeployedInner({
                _originChainId: _originChainId,
                _assetId: _assetId,
                _originToken: _originToken,
                _erc20Data: _erc20Data,
                _expectedToken: expectedToken
            });
        }
    }

    /// @notice Deploys the beacon proxy for the L2 token, while using ContractDeployer system contract.
    /// @dev This function uses raw call to ContractDeployer to make sure that exactly `l2TokenProxyBytecodeHash` is used
    /// for the code of the proxy.
    /// @param _salt The salt used for beacon proxy deployment of L2 bridged token.
    /// @return proxy The beacon proxy, i.e. L2 bridged token.
    function _deployBeaconProxy(bytes32 _salt) internal virtual override returns (BeaconProxy proxy) {
        if (address(L2_LEGACY_SHARED_BRIDGE) == address(0)) {
            // Deploy the beacon proxy for the L2 token

            (bool success, bytes memory returndata) = SystemContractsCaller.systemCallWithReturndata(
                uint32(gasleft()),
                DEPLOYER_SYSTEM_CONTRACT,
                0,
                abi.encodeCall(
                    IContractDeployer.create2,
                    (_salt, l2TokenProxyBytecodeHash, abi.encode(address(bridgedTokenBeacon), ""))
                )
            );

            // The deployment should be successful and return the address of the proxy
            if (!success) {
                revert DeployFailed();
            }
            proxy = BeaconProxy(abi.decode(returndata, (address)));
        } else {
            // Legacy chains: the legacy bridge must deploy so addresses match its historical scheme.
            address l2TokenAddr = L2_LEGACY_SHARED_BRIDGE.deployBeaconProxy(_salt);
            proxy = BeaconProxy(payable(l2TokenAddr));
        }
    }

    /// @dev Pays out bridged funds; the base token cannot be withdrawn through the L2 NTV.
    function _withdrawFunds(bytes32 _assetId, address _to, address _token, uint256 _amount) internal override {
        if (_assetId == BASE_TOKEN_ASSET_ID) {
            revert AssetIdNotSupported(BASE_TOKEN_ASSET_ID);
        } else {
            // Withdraw funds
            IERC20(_token).safeTransfer(_to, _amount);
        }
    }

    /*//////////////////////////////////////////////////////////////
                INTERNAL & HELPER FUNCTIONS
    //////////////////////////////////////////////////////////////*/

    /// @notice Calculates L2 wrapped token address given the currently stored beacon proxy bytecode hash and beacon address.
    /// @param _originChainId The chain id of the token's origin chain.
    /// @param _l1Token The address of token on L1.
    /// @return Address of an L2 token counterpart.
    function calculateCreate2TokenAddress(
        uint256 _originChainId,
        address _l1Token
    ) public view virtual override(INativeTokenVault, NativeTokenVault) returns (address) {
        bytes32 constructorInputHash = keccak256(abi.encode(address(bridgedTokenBeacon), ""));
        bytes32 salt = _getCreate2Salt(_originChainId, _l1Token);
        if (address(L2_LEGACY_SHARED_BRIDGE) != address(0)) {
            // On legacy chains the legacy bridge owns the address-derivation scheme.
            return L2_LEGACY_SHARED_BRIDGE.l2TokenAddress(_l1Token);
        } else {
            return
                L2ContractHelper.computeCreate2Address(
                    address(this),
                    salt,
                    l2TokenProxyBytecodeHash,
                    constructorInputHash
                );
        }
    }

    /// @notice Calculates the salt for the Create2 deployment of the L2 token.
    /// @dev L1-origin tokens keep the legacy salt (the raw L1 address) for address stability.
    function _getCreate2Salt(uint256 _originChainId, address _l1Token) internal view override returns (bytes32 salt) {
        salt = _originChainId == L1_CHAIN_ID
            ? bytes32(uint256(uint160(_l1Token)))
            : keccak256(abi.encode(_originChainId, _l1Token));
    }

    /// @dev No-op: per-chain balance accounting is an L1-only protection.
    function _handleChainBalanceIncrease(
        uint256 _chainId,
        bytes32 _assetId,
        uint256 _amount,
        bool _isNative
    ) internal override {
        // on L2s we don't track the balance
    }

    /// @dev No-op: per-chain balance accounting is an L1-only protection.
    function _handleChainBalanceDecrease(
        uint256 _chainId,
        bytes32 _assetId,
        uint256 _amount,
        bool _isNative
    ) internal override {
        // on L2s we don't track the balance
    }

    /*//////////////////////////////////////////////////////////////
                LEGACY FUNCTIONS
    //////////////////////////////////////////////////////////////*/

    /// @notice Calculates L2 wrapped token address corresponding to L1 token counterpart.
    /// @param _l1Token The address of token on L1.
    /// @return expectedToken The address of token on L2.
    function l2TokenAddress(address _l1Token) public view returns (address expectedToken) {
        bytes32 expectedAssetId = DataEncoding.encodeNTVAssetId(L1_CHAIN_ID, _l1Token);
        expectedToken = tokenAddress[expectedAssetId];
    }
}
security@matterlabs.dev +/// @dev Vault holding L1 native ETH and ERC20 tokens bridged into the ZK chains. +/// @dev Designed for use with a proxy for upgradability. +abstract contract NativeTokenVault is INativeTokenVault, IAssetHandler, Ownable2StepUpgradeable, PausableUpgradeable { + using SafeERC20 for IERC20; + + /// @dev The address of the WETH token. + address public immutable override WETH_TOKEN; + + /// @dev L1 Shared Bridge smart contract that handles communication with its counterparts on L2s + IAssetRouterBase public immutable override ASSET_ROUTER; + + /// @dev The assetId of the base token. + bytes32 public immutable BASE_TOKEN_ASSET_ID; + + /// @dev Chain ID of L1 for bridging reasons. + uint256 public immutable L1_CHAIN_ID; + + /// @dev Contract that stores the implementation address for token. + /// @dev For more details see https://docs.openzeppelin.com/contracts/3.x/api/proxy#UpgradeableBeacon. + IBeacon public bridgedTokenBeacon; + + /// @dev A mapping assetId => tokenAddress + mapping(bytes32 assetId => uint256 chainId) public originChainId; + + /// @dev A mapping assetId => tokenAddress + mapping(bytes32 assetId => address tokenAddress) public tokenAddress; + + /// @dev A mapping tokenAddress => assetId + mapping(address tokenAddress => bytes32 assetId) public assetId; + + /** + * @dev This empty reserved space is put in place to allow future versions to add new + * variables without shifting down storage in the inheritance chain. + * See https://docs.openzeppelin.com/contracts/4.x/upgradeable#storage_gaps + */ + uint256[46] private __gap; + + /// @notice Checks that the message sender is the bridgehub. + modifier onlyAssetRouter() { + if (msg.sender != address(ASSET_ROUTER)) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @dev Contract is expected to be used as proxy implementation. + /// @dev Disable the initialization to prevent Parity hack. 
+ /// @param _wethToken Address of WETH on deployed chain + /// @param _assetRouter Address of assetRouter + constructor(address _wethToken, address _assetRouter, bytes32 _baseTokenAssetId, uint256 _l1ChainId) { + _disableInitializers(); + L1_CHAIN_ID = _l1ChainId; + ASSET_ROUTER = IAssetRouterBase(_assetRouter); + WETH_TOKEN = _wethToken; + BASE_TOKEN_ASSET_ID = _baseTokenAssetId; + } + + /// @inheritdoc INativeTokenVault + function registerToken(address _nativeToken) external virtual { + _registerToken(_nativeToken); + } + + function _registerToken(address _nativeToken) internal { + if (_nativeToken == WETH_TOKEN) { + revert TokenNotSupported(WETH_TOKEN); + } + require(_nativeToken.code.length > 0, "NTV: empty token"); + _unsafeRegisterNativeToken(_nativeToken); + } + + /// @inheritdoc INativeTokenVault + function ensureTokenIsRegistered(address _nativeToken) public { + if (assetId[_nativeToken] == bytes32(0)) { + _registerToken(_nativeToken); + } + } + + /*////////////////////////////////////////////////////////////// + FINISH TRANSACTION FUNCTIONS + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc IAssetHandler + /// @notice Used when the chain receives a transfer from L1 Shared Bridge and correspondingly mints the asset. + /// @param _chainId The chainId that the message is from. + /// @param _assetId The assetId of the asset being bridged. + /// @param _data The abi.encoded transfer data. + function bridgeMint( + uint256 _chainId, + bytes32 _assetId, + bytes calldata _data + ) external payable override onlyAssetRouter whenNotPaused { + address receiver; + uint256 amount; + // we set all originChainId for all already bridged tokens with the setLegacyTokenAssetId and updateChainBalancesFromSharedBridge functions. + // for tokens that are bridged for the first time, the originChainId will be 0. 
+ if (originChainId[_assetId] == block.chainid) { + (receiver, amount) = _bridgeMintNativeToken(_chainId, _assetId, _data); + } else { + (receiver, amount) = _bridgeMintBridgedToken(_chainId, _assetId, _data); + } + // solhint-disable-next-line func-named-parameters + emit BridgeMint(_chainId, _assetId, receiver, amount); + } + + function _bridgeMintBridgedToken( + uint256 _originChainId, + bytes32 _assetId, + bytes calldata _data + ) internal virtual returns (address receiver, uint256 amount) { + // Either it was bridged before, therefore address is not zero, or it is first time bridging and standard erc20 will be deployed + address token = tokenAddress[_assetId]; + bytes memory erc20Data; + address originToken; + // slither-disable-next-line unused-return + (, receiver, originToken, amount, erc20Data) = DataEncoding.decodeBridgeMintData(_data); + + if (token == address(0)) { + token = _ensureTokenDeployed(_originChainId, _assetId, originToken, erc20Data); + } + _handleChainBalanceDecrease(_originChainId, _assetId, amount, false); + IBridgedStandardToken(token).bridgeMint(receiver, amount); + emit BridgeMint(_originChainId, _assetId, receiver, amount); + } + + function _bridgeMintNativeToken( + uint256 _originChainId, + bytes32 _assetId, + bytes calldata _data + ) internal returns (address receiver, uint256 amount) { + address token = tokenAddress[_assetId]; + // slither-disable-next-line unused-return + (, receiver, , amount, ) = DataEncoding.decodeBridgeMintData(_data); + + _handleChainBalanceDecrease(_originChainId, _assetId, amount, true); + _withdrawFunds(_assetId, receiver, token, amount); + emit BridgeMint(_originChainId, _assetId, receiver, amount); + } + + function _withdrawFunds(bytes32 _assetId, address _to, address _token, uint256 _amount) internal virtual; + + /*////////////////////////////////////////////////////////////// + Start transaction Functions + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc 
IAssetHandler + /// @notice Allows bridgehub to acquire mintValue for L1->L2 transactions. + /// @dev In case of native token vault _data is the tuple of _depositAmount and _receiver. + function bridgeBurn( + uint256 _chainId, + uint256, + bytes32 _assetId, + address _originalCaller, + bytes calldata _data + ) external payable override onlyAssetRouter whenNotPaused returns (bytes memory _bridgeMintData) { + if (originChainId[_assetId] != block.chainid) { + _bridgeMintData = _bridgeBurnBridgedToken(_chainId, _assetId, _originalCaller, _data); + } else { + _bridgeMintData = _bridgeBurnNativeToken({ + _chainId: _chainId, + _assetId: _assetId, + _originalCaller: _originalCaller, + _depositChecked: false, + _data: _data + }); + } + } + + function _bridgeBurnBridgedToken( + uint256 _chainId, + bytes32 _assetId, + address _originalCaller, + bytes calldata _data + ) internal returns (bytes memory _bridgeMintData) { + (uint256 _amount, address _receiver) = abi.decode(_data, (uint256, address)); + if (_amount == 0) { + // "Amount cannot be zero"); + revert AmountMustBeGreaterThanZero(); + } + + address bridgedToken = tokenAddress[_assetId]; + IBridgedStandardToken(bridgedToken).bridgeBurn(_originalCaller, _amount); + + emit BridgeBurn({ + chainId: _chainId, + assetId: _assetId, + sender: _originalCaller, + receiver: _receiver, + amount: _amount + }); + bytes memory erc20Metadata; + { + // we set all originChainId for all already bridged tokens with the setLegacyTokenAssetId and updateChainBalancesFromSharedBridge functions. + // for native tokens the originChainId is set when they register. 
+ uint256 originChainId = originChainId[_assetId]; + if (originChainId == 0) { + revert ZeroAddress(); + } + erc20Metadata = getERC20Getters(bridgedToken, originChainId); + } + address originToken; + { + originToken = IBridgedStandardToken(bridgedToken).originToken(); + if (originToken == address(0)) { + revert ZeroAddress(); + } + } + + _bridgeMintData = DataEncoding.encodeBridgeMintData({ + _originalCaller: _originalCaller, + _l2Receiver: _receiver, + _l1Token: originToken, + _amount: _amount, + _erc20Metadata: erc20Metadata + }); + } + + function _bridgeBurnNativeToken( + uint256 _chainId, + bytes32 _assetId, + address _originalCaller, + bool _depositChecked, + bytes calldata _data + ) internal virtual returns (bytes memory _bridgeMintData) { + (uint256 _depositAmount, address _receiver) = abi.decode(_data, (uint256, address)); + + uint256 amount; + address nativeToken = tokenAddress[_assetId]; + if (_assetId == BASE_TOKEN_ASSET_ID) { + amount = msg.value; + + // In the old SDK/contracts the user had to always provide `0` as the deposit amount for ETH token, while + // ultimately the provided `msg.value` was used as the deposit amount. This check is needed for backwards compatibility. + if (_depositAmount == 0) { + _depositAmount = amount; + } + _handleChainBalanceIncrease(_chainId, _assetId, amount, true); + if (_depositAmount != amount) { + revert ValueMismatch(_depositAmount, amount); + } + } else { + // The Bridgehub also checks this, but we want to be sure + if (msg.value != 0) { + revert NonEmptyMsgValue(); + } + amount = _depositAmount; + _handleChainBalanceIncrease(_chainId, _assetId, amount, true); + if (!_depositChecked) { + uint256 expectedDepositAmount = _depositFunds(_originalCaller, IERC20(nativeToken), _depositAmount); // note if _originalCaller is this contract, this will return 0. This does not happen. 
+ // The token has non-standard transfer logic + if (amount != expectedDepositAmount) { + revert TokensWithFeesNotSupported(); + } + } + } + if (amount == 0) { + // empty deposit amount + revert EmptyDeposit(); + } + + bytes memory erc20Metadata; + { + erc20Metadata = getERC20Getters(nativeToken, originChainId[_assetId]); + } + _bridgeMintData = DataEncoding.encodeBridgeMintData({ + _originalCaller: _originalCaller, + _l2Receiver: _receiver, + _l1Token: nativeToken, + _amount: amount, + _erc20Metadata: erc20Metadata + }); + + emit BridgeBurn({ + chainId: _chainId, + assetId: _assetId, + sender: _originalCaller, + receiver: _receiver, + amount: amount + }); + } + + /*////////////////////////////////////////////////////////////// + INTERNAL & HELPER FUNCTIONS + //////////////////////////////////////////////////////////////*/ + + /// @notice Transfers tokens from the depositor address to the smart contract address. + /// @param _from The address of the depositor. + /// @param _token The ERC20 token to be transferred. + /// @param _amount The amount to be transferred. + /// @return The difference between the contract balance before and after the transferring of funds. + function _depositFunds(address _from, IERC20 _token, uint256 _amount) internal virtual returns (uint256) { + uint256 balanceBefore = _token.balanceOf(address(this)); + // slither-disable-next-line arbitrary-send-erc20 + _token.safeTransferFrom(_from, address(this), _amount); + uint256 balanceAfter = _token.balanceOf(address(this)); + + return balanceAfter - balanceBefore; + } + + /// @param _token The address of token of interest. + /// @dev Receives and parses (name, symbol, decimals) from the token contract + function getERC20Getters(address _token, uint256 _originChainId) public view override returns (bytes memory) { + return BridgeHelper.getERC20Getters(_token, _originChainId); + } + + /// @notice Registers a native token address for the vault. 
+ /// @dev It does not perform any checks for the correctness of the token contract. + /// @param _nativeToken The address of the token to be registered. + function _unsafeRegisterNativeToken(address _nativeToken) internal { + bytes32 newAssetId = DataEncoding.encodeNTVAssetId(block.chainid, _nativeToken); + ASSET_ROUTER.setAssetHandlerAddressThisChain(bytes32(uint256(uint160(_nativeToken))), address(this)); + tokenAddress[newAssetId] = _nativeToken; + assetId[_nativeToken] = newAssetId; + originChainId[newAssetId] = block.chainid; + } + + function _handleChainBalanceIncrease( + uint256 _chainId, + bytes32 _assetId, + uint256 _amount, + bool _isNative + ) internal virtual; + + function _handleChainBalanceDecrease( + uint256 _chainId, + bytes32 _assetId, + uint256 _amount, + bool _isNative + ) internal virtual; + + /*////////////////////////////////////////////////////////////// + TOKEN DEPLOYER FUNCTIONS + //////////////////////////////////////////////////////////////*/ + + function _ensureTokenDeployed( + uint256 _originChainId, + bytes32 _assetId, + address _originToken, + bytes memory _erc20Data + ) internal virtual returns (address expectedToken) { + expectedToken = _assetIdCheck(_originChainId, _assetId, _originToken); + _ensureTokenDeployedInner({ + _originChainId: _originChainId, + _assetId: _assetId, + _originToken: _originToken, + _erc20Data: _erc20Data, + _expectedToken: expectedToken + }); + } + + function _assetIdCheck( + uint256 _originChainId, + bytes32 _assetId, + address _originToken + ) internal view returns (address expectedToken) { + expectedToken = calculateCreate2TokenAddress(_originChainId, _originToken); + bytes32 expectedAssetId = DataEncoding.encodeNTVAssetId(_originChainId, _originToken); + if (_assetId != expectedAssetId) { + // Make sure that a NativeTokenVault sent the message + revert AssetIdMismatch(_assetId, expectedAssetId); + } + } + + function _ensureTokenDeployedInner( + uint256 _originChainId, + bytes32 _assetId, + address 
_originToken, + bytes memory _erc20Data, + address _expectedToken + ) internal { + address deployedToken = _deployBridgedToken(_originChainId, _assetId, _originToken, _erc20Data); + if (deployedToken != _expectedToken) { + revert AddressMismatch(_expectedToken, deployedToken); + } + + tokenAddress[_assetId] = _expectedToken; + assetId[_expectedToken] = _assetId; + } + + /// @notice Calculates the bridged token address corresponding to native token counterpart. + /// @param _bridgeToken The address of native token. + /// @return The address of bridged token. + function calculateCreate2TokenAddress( + uint256 _originChainId, + address _bridgeToken + ) public view virtual override returns (address); + + /// @notice Deploys and initializes the bridged token for the native counterpart. + /// @param _originToken The address of origin token. + /// @param _erc20Data The ERC20 metadata of the token deployed. + /// @return The address of the beacon proxy (bridged token). + function _deployBridgedToken( + uint256 _originChainId, + bytes32 _assetId, + address _originToken, + bytes memory _erc20Data + ) internal returns (address) { + bytes32 salt = _getCreate2Salt(_originChainId, _originToken); + + BeaconProxy l2Token = _deployBeaconProxy(salt); + uint256 tokenOriginChainId = BridgedStandardERC20(address(l2Token)).bridgeInitialize( + _assetId, + _originToken, + _erc20Data + ); + // an extra check for legacy tokens on L1, they might not be registered i.e. + if (block.chainid == L1_CHAIN_ID && tokenOriginChainId == 0) { + revert L1TokenDeploymentWithZeroChainId(_assetId); + } + tokenOriginChainId = tokenOriginChainId == 0 ? L1_CHAIN_ID : tokenOriginChainId; + if (tokenOriginChainId == block.chainid) { + revert DeployingBridgedTokenForNativeToken(); + } + originChainId[DataEncoding.encodeNTVAssetId(tokenOriginChainId, _originToken)] = tokenOriginChainId; + return address(l2Token); + } + + /// @notice Converts the L1 token address to the create2 salt of deployed L2 token. 
+ /// @param _l1Token The address of token on L1. + /// @return salt The salt used to compute address of bridged token on L2 and for beacon proxy deployment. + function _getCreate2Salt(uint256 _originChainId, address _l1Token) internal view virtual returns (bytes32 salt) { + salt = keccak256(abi.encode(_originChainId, _l1Token)); + } + + /// @notice Deploys the beacon proxy for the bridged token. + /// @dev This function uses raw call to ContractDeployer to make sure that exactly `l2TokenProxyBytecodeHash` is used + /// for the code of the proxy. + /// @param _salt The salt used for beacon proxy deployment of the bridged token (we pass the native token address). + /// @return proxy The beacon proxy, i.e. bridged token. + function _deployBeaconProxy(bytes32 _salt) internal virtual returns (BeaconProxy proxy); + + /*////////////////////////////////////////////////////////////// + PAUSE + //////////////////////////////////////////////////////////////*/ + + /// @notice Pauses all functions marked with the `whenNotPaused` modifier. + function pause() external onlyOwner { + _pause(); + } + + /// @notice Unpauses the contract, allowing all functions marked with the `whenNotPaused` modifier to be called again. 
+ function unpause() external onlyOwner { + _unpause(); + } +} diff --git a/l1-contracts/contracts/bridgehub/Bridgehub.sol b/l1-contracts/contracts/bridgehub/Bridgehub.sol index 7b9ecf43b..e084bc3d1 100644 --- a/l1-contracts/contracts/bridgehub/Bridgehub.sol +++ b/l1-contracts/contracts/bridgehub/Bridgehub.sol @@ -2,32 +2,65 @@ pragma solidity 0.8.24; -import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable/access/Ownable2StepUpgradeable.sol"; -import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable/security/PausableUpgradeable.sol"; +// solhint-disable reason-string, gas-custom-errors -import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter, L2TransactionRequestTwoBridgesInner} from "./IBridgehub.sol"; -import {IBridgehub, IL1SharedBridge} from "../bridge/interfaces/IL1SharedBridge.sol"; -import {IStateTransitionManager} from "../state-transition/IStateTransitionManager.sol"; +import {EnumerableMap} from "@openzeppelin/contracts-v4/utils/structs/EnumerableMap.sol"; + +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/security/PausableUpgradeable.sol"; + +import {IBridgehub, L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter, L2TransactionRequestTwoBridgesInner, BridgehubMintCTMAssetData, BridgehubBurnCTMAssetData} from "./IBridgehub.sol"; +import {IAssetRouterBase} from "../bridge/asset-router/IAssetRouterBase.sol"; +import {IL1AssetRouter} from "../bridge/asset-router/IL1AssetRouter.sol"; +import {IL1BaseTokenAssetHandler} from "../bridge/interfaces/IL1BaseTokenAssetHandler.sol"; +import {IChainTypeManager} from "../state-transition/IChainTypeManager.sol"; import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; -import {IZkSyncHyperchain} from "../state-transition/chain-interfaces/IZkSyncHyperchain.sol"; -import {ETH_TOKEN_ADDRESS, TWO_BRIDGES_MAGIC_VALUE, 
BRIDGEHUB_MIN_SECOND_BRIDGE_ADDRESS} from "../common/Config.sol"; +import {DataEncoding} from "../common/libraries/DataEncoding.sol"; +import {IZKChain} from "../state-transition/chain-interfaces/IZKChain.sol"; + +import {ETH_TOKEN_ADDRESS, TWO_BRIDGES_MAGIC_VALUE, BRIDGEHUB_MIN_SECOND_BRIDGE_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER, L1_SETTLEMENT_LAYER_VIRTUAL_ADDRESS} from "../common/Config.sol"; import {BridgehubL2TransactionRequest, L2Message, L2Log, TxStatus} from "../common/Messaging.sol"; import {AddressAliasHelper} from "../vendor/AddressAliasHelper.sol"; - +import {IMessageRoot} from "./IMessageRoot.sol"; +import {ICTMDeploymentTracker} from "./ICTMDeploymentTracker.sol"; +import {MigrationPaused, AssetIdAlreadyRegistered, ChainAlreadyLive, ChainNotLegacy, CTMNotRegistered, ChainIdNotRegistered, AssetHandlerNotRegistered, ZKChainLimitReached, CTMAlreadyRegistered, CTMNotRegistered, ZeroChainId, ChainIdTooBig, BridgeHubAlreadyRegistered, AddressTooLow, MsgValueMismatch, ZeroAddress, Unauthorized, SharedBridgeNotSet, WrongMagicValue, ChainIdAlreadyExists, ChainIdMismatch, ChainIdCantBeCurrentChain, EmptyAssetId, AssetIdNotSupported, IncorrectBridgeHubAddress} from "../common/L1ContractErrors.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev The Bridgehub contract serves as the primary entry point for L1<->L2 communication, +/// facilitating interactions between end user and bridges. +/// It also manages state transition managers, base tokens, and chain registrations. +/// Bridgehub is also an IL1AssetHandler for the chains themselves, which is used to migrate the chains +/// between different settlement layers (for example from L1 to Gateway). 
contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, PausableUpgradeable { - /// @notice all the ether is held by the weth bridge - IL1SharedBridge public sharedBridge; + using EnumerableMap for EnumerableMap.UintToAddressMap; + + /// @notice the asset id of Eth. This is only used on L1. + bytes32 internal immutable ETH_TOKEN_ASSET_ID; + + /// @notice The chain id of L1. This contract can be deployed on multiple layers, but this value is still equal to the + /// L1 that is at the most base layer. + uint256 public immutable L1_CHAIN_ID; + + /// @notice The total number of ZK chains can be created/connected to this CTM. + /// This is the temporary security measure. + uint256 public immutable MAX_NUMBER_OF_ZK_CHAINS; + + /// @notice all the ether and ERC20 tokens are held by NativeVaultToken managed by this shared Bridge. + address public assetRouter; + + /// @notice ChainTypeManagers that are registered, and ZKchains that use these CTMs can use this bridgehub as settlement layer. + mapping(address chainTypeManager => bool) public chainTypeManagerIsRegistered; - /// @notice we store registered stateTransitionManagers - mapping(address _stateTransitionManager => bool) public stateTransitionManagerIsRegistered; /// @notice we store registered tokens (for arbitrary base token) - mapping(address _token => bool) public tokenIsRegistered; + mapping(address baseToken => bool) public __DEPRECATED_tokenIsRegistered; - /// @notice chainID => StateTransitionManager contract address, storing StateTransitionManager - mapping(uint256 _chainId => address) public stateTransitionManager; + /// @notice chainID => ChainTypeManager contract address, CTM that is managing rules for a given ZKchain. 
+ mapping(uint256 chainId => address) public chainTypeManager; - /// @notice chainID => baseToken contract address, storing baseToken - mapping(uint256 _chainId => address) public baseToken; + /// @notice chainID => baseToken contract address, token that is used as 'base token' by a given child chain. + // slither-disable-next-line uninitialized-state + mapping(uint256 chainId => address) public __DEPRECATED_baseToken; /// @dev used to manage non critical updates address public admin; @@ -35,23 +68,126 @@ contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, Paus /// @dev used to accept the admin role address private pendingAdmin; + /// @notice The map from chainId => zkChain contract + EnumerableMap.UintToAddressMap internal zkChainMap; + + /// @notice The contract that stores the cross-chain message root for each chain and the aggregated root. + /// @dev Note that the message root does not contain messages from the chain it is deployed on. It may + /// be added later on if needed. + IMessageRoot public override messageRoot; + + /// @notice Mapping from chain id to encoding of the base token used for deposits / withdrawals + mapping(uint256 chainId => bytes32) public baseTokenAssetId; + + /// @notice The deployment tracker for the state transition managers. + /// @dev The L1 address of the ctm deployer is provided. + ICTMDeploymentTracker public l1CtmDeployer; + + /// @dev asset info used to identify chains in the Shared Bridge + mapping(bytes32 ctmAssetId => address ctmAddress) public ctmAssetIdToAddress; + + /// @dev ctmAddress to ctmAssetId + mapping(address ctmAddress => bytes32 ctmAssetId) public ctmAssetIdFromAddress; + + /// @dev used to indicate the currently active settlement layer for a given chainId + mapping(uint256 chainId => uint256 activeSettlementLayerChainId) public settlementLayer; + + /// @notice shows whether the given chain can be used as a settlement layer. 
+ /// @dev the Gateway will be one of the possible settlement layers. The L1 is also a settlement layer. + /// @dev Sync layer chain is expected to have .. as the base token. + mapping(uint256 chainId => bool isWhitelistedSettlementLayer) public whitelistedSettlementLayers; + + /// @notice we store registered assetIds (for arbitrary base token) + mapping(bytes32 baseTokenAssetId => bool) public assetIdIsRegistered; + + /// @notice used to pause the migrations of chains. Used for upgrades. + bool public migrationPaused; + + modifier onlyOwnerOrAdmin() { + if (msg.sender != admin && msg.sender != owner()) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyChainCTM(uint256 _chainId) { + if (msg.sender != chainTypeManager[_chainId]) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyL1() { + if (L1_CHAIN_ID != block.chainid) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlySettlementLayerRelayedSender() { + /// There is no sender for the wrapping, we use a virtual address. + if (msg.sender != SETTLEMENT_LAYER_RELAY_SENDER) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyAssetRouter() { + if (msg.sender != assetRouter) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier whenMigrationsNotPaused() { + if (migrationPaused) { + revert MigrationPaused(); + } + _; + } + /// @notice to avoid parity hack - constructor() reentrancyGuardInitializer {} + constructor(uint256 _l1ChainId, address _owner, uint256 _maxNumberOfZKChains) reentrancyGuardInitializer { + _disableInitializers(); + L1_CHAIN_ID = _l1ChainId; + MAX_NUMBER_OF_ZK_CHAINS = _maxNumberOfZKChains; + + // Note that this assumes that the bridgehub only accepts transactions on chains with ETH base token only. + // This is indeed true, since the only methods where this immutable is used are the ones with `onlyL1` modifier. + // We will change this with interop. 
+ ETH_TOKEN_ASSET_ID = DataEncoding.encodeNTVAssetId(L1_CHAIN_ID, ETH_TOKEN_ADDRESS); + _transferOwnership(_owner); + _initializeInner(); + } /// @notice used to initialize the contract - function initialize(address _owner) external reentrancyGuardInitializer { + /// @notice this contract is also deployed on L2 as a system contract there the owner and the related functions will not be used + /// @param _owner the owner of the contract + function initialize(address _owner) external reentrancyGuardInitializer onlyL1 { _transferOwnership(_owner); + _initializeInner(); } - modifier onlyOwnerOrAdmin() { - require(msg.sender == admin || msg.sender == owner(), "Bridgehub: not owner or admin"); - _; + /// @notice Used to initialize the contract on L1 + function initializeV2() external initializer onlyL1 { + _initializeInner(); + } + + /// @notice Initializes the contract + function _initializeInner() internal { + assetIdIsRegistered[ETH_TOKEN_ASSET_ID] = true; + whitelistedSettlementLayers[L1_CHAIN_ID] = true; } + //// Initialization and registration + /// @inheritdoc IBridgehub /// @dev Please note, if the owner wants to enforce the admin change it must execute both `setPendingAdmin` and /// `acceptAdmin` atomically. Otherwise `admin` can set different pending admin and so fail to accept the admin rights. 
function setPendingAdmin(address _newPendingAdmin) external onlyOwnerOrAdmin { + if (_newPendingAdmin == address(0)) { + revert ZeroAddress(); + } // Save previous value into the stack to put it into the event later address oldPendingAdmin = pendingAdmin; // Change pending admin @@ -62,7 +198,10 @@ contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, Paus /// @inheritdoc IBridgehub function acceptAdmin() external { address currentPendingAdmin = pendingAdmin; - require(msg.sender == currentPendingAdmin, "n42"); // Only proposed by current admin address can claim the admin rights + // Only proposed by current admin address can claim the admin rights + if (msg.sender != currentPendingAdmin) { + revert Unauthorized(msg.sender); + } address previousAdmin = admin; admin = currentPendingAdmin; @@ -72,170 +211,275 @@ contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, Paus emit NewAdmin(previousAdmin, currentPendingAdmin); } - ///// Getters + /// @notice To set the addresses of some of the ecosystem contracts, only Owner. Not done in initialize, as + /// the order of deployment is Bridgehub, other contracts, and then we call this. + /// @param _assetRouter the shared bridge address + /// @param _l1CtmDeployer the ctm deployment tracker address. Note, that the address of the L1 CTM deployer is provided. + /// @param _messageRoot the message root address + function setAddresses( + address _assetRouter, + ICTMDeploymentTracker _l1CtmDeployer, + IMessageRoot _messageRoot + ) external onlyOwner { + assetRouter = _assetRouter; + l1CtmDeployer = _l1CtmDeployer; + messageRoot = _messageRoot; + } - /// @notice return the state transition chain contract for a chainId - function getHyperchain(uint256 _chainId) public view returns (address) { - return IStateTransitionManager(stateTransitionManager[_chainId]).getHyperchain(_chainId); + /// @notice Used for the upgrade to set the baseTokenAssetId previously stored as baseToken. 
+ /// @param _chainId the chainId of the chain. + function setLegacyBaseTokenAssetId(uint256 _chainId) external override { + if (baseTokenAssetId[_chainId] != bytes32(0)) { + return; + } + address token = __DEPRECATED_baseToken[_chainId]; + require(token != address(0), "BH: token not set"); + baseTokenAssetId[_chainId] = DataEncoding.encodeNTVAssetId(block.chainid, token); + } + + /// @notice Used to set the legacy chain address for the upgrade. + /// @param _chainId The chainId of the legacy chain we are migrating. + function setLegacyChainAddress(uint256 _chainId) external override { + address ctm = chainTypeManager[_chainId]; + if (ctm == address(0)) { + revert ChainNotLegacy(); + } + if (zkChainMap.contains(_chainId)) { + revert ChainAlreadyLive(); + } + address chainAddress = IChainTypeManager(ctm).getZKChainLegacy(_chainId); + if (chainAddress == address(0)) { + revert ChainNotLegacy(); + } + _registerNewZKChain(_chainId, chainAddress); } //// Registry /// @notice State Transition can be any contract with the appropriate interface/functionality - function addStateTransitionManager(address _stateTransitionManager) external onlyOwner { - require( - !stateTransitionManagerIsRegistered[_stateTransitionManager], - "Bridgehub: state transition already registered" - ); - stateTransitionManagerIsRegistered[_stateTransitionManager] = true; + /// @param _chainTypeManager the state transition manager address to be added + function addChainTypeManager(address _chainTypeManager) external onlyOwner { + if (_chainTypeManager == address(0)) { + revert ZeroAddress(); + } + if (chainTypeManagerIsRegistered[_chainTypeManager]) { + revert CTMAlreadyRegistered(); + } + chainTypeManagerIsRegistered[_chainTypeManager] = true; + + emit ChainTypeManagerAdded(_chainTypeManager); } /// @notice State Transition can be any contract with the appropriate interface/functionality /// @notice this stops new Chains from using the STF, old chains are not affected - function 
removeStateTransitionManager(address _stateTransitionManager) external onlyOwner { - require( - stateTransitionManagerIsRegistered[_stateTransitionManager], - "Bridgehub: state transition not registered yet" - ); - stateTransitionManagerIsRegistered[_stateTransitionManager] = false; + /// @param _chainTypeManager the state transition manager address to be removed + function removeChainTypeManager(address _chainTypeManager) external onlyOwner { + if (_chainTypeManager == address(0)) { + revert ZeroAddress(); + } + if (!chainTypeManagerIsRegistered[_chainTypeManager]) { + revert CTMNotRegistered(); + } + chainTypeManagerIsRegistered[_chainTypeManager] = false; + + emit ChainTypeManagerRemoved(_chainTypeManager); } - /// @notice token can be any contract with the appropriate interface/functionality - function addToken(address _token) external onlyOwnerOrAdmin { - require(!tokenIsRegistered[_token], "Bridgehub: token already registered"); - tokenIsRegistered[_token] = true; + /// @notice asset id can represent any token contract with the appropriate interface/functionality + /// @param _baseTokenAssetId asset id of base token to be registered + function addTokenAssetId(bytes32 _baseTokenAssetId) external onlyOwnerOrAdmin { + if (assetIdIsRegistered[_baseTokenAssetId]) { + revert AssetIdAlreadyRegistered(); + } + assetIdIsRegistered[_baseTokenAssetId] = true; + + emit BaseTokenAssetIdRegistered(_baseTokenAssetId); } - /// @notice To set shared bridge, only Owner. Not done in initialize, as - /// the order of deployment is Bridgehub, Shared bridge, and then we call this - function setSharedBridge(address _sharedBridge) external onlyOwner { - sharedBridge = IL1SharedBridge(_sharedBridge); + /// @notice Used to register a chain as a settlement layer. 
+ /// @param _newSettlementLayerChainId the chainId of the chain + /// @param _isWhitelisted whether the chain is a whitelisted settlement layer + function registerSettlementLayer( + uint256 _newSettlementLayerChainId, + bool _isWhitelisted + ) external onlyOwner onlyL1 { + whitelistedSettlementLayers[_newSettlementLayerChainId] = _isWhitelisted; + emit SettlementLayerRegistered(_newSettlementLayerChainId, _isWhitelisted); } - /// @notice register new chain + /// @dev Used to set the assetAddress for a given assetInfo. + /// @param _additionalData the additional data to identify the asset + /// @param _assetAddress the asset handler address + function setAssetHandlerAddress(bytes32 _additionalData, address _assetAddress) external { + // It is a simplified version of the logic used by the AssetRouter to manage asset handlers. + // CTM's assetId is `keccak256(abi.encode(L1_CHAIN_ID, l1CtmDeployer, ctmAddress))`. + // And the l1CtmDeployer is considered the deployment tracker for the CTM asset. + // + // The l1CtmDeployer will call this method to set the asset handler address for the assetId. + // If the chain is not the same as L1, we assume that it is done via L1->L2 communication and so we unalias the sender. + // + // For simpler handling we allow anyone to call this method. It is okay, since during bridging operations + // it is double checked that `assetId` is indeed derived from the `l1CtmDeployer`. + // TODO(EVM-703): This logic should be revised once interchain communication is implemented. + + address sender = L1_CHAIN_ID == block.chainid ? 
msg.sender : AddressAliasHelper.undoL1ToL2Alias(msg.sender); + // This method can be accessed by l1CtmDeployer only + if (sender != address(l1CtmDeployer)) { + revert Unauthorized(sender); + } + if (!chainTypeManagerIsRegistered[_assetAddress]) { + revert CTMNotRegistered(); + } + + bytes32 assetInfo = keccak256(abi.encode(L1_CHAIN_ID, sender, _additionalData)); + ctmAssetIdToAddress[assetInfo] = _assetAddress; + ctmAssetIdFromAddress[_assetAddress] = assetInfo; + emit AssetRegistered(assetInfo, _assetAddress, _additionalData, msg.sender); + } + + /*////////////////////////////////////////////////////////////// + Chain Registration + //////////////////////////////////////////////////////////////*/ + + /// @notice register new chain. New chains can be only registered on Bridgehub deployed on L1. Later they can be moved to any other layer. /// @notice for Eth the baseToken address is 1 + /// @param _chainId the chainId of the chain + /// @param _chainTypeManager the state transition manager address + /// @param _baseTokenAssetId the base token asset id of the chain + /// @param _salt the salt for the chainId, currently not used + /// @param _admin the admin of the chain + /// @param _initData the fixed initialization data for the chain + /// @param _factoryDeps the factory dependencies for the chain's deployment function createNewChain( uint256 _chainId, - address _stateTransitionManager, - address _baseToken, + address _chainTypeManager, + bytes32 _baseTokenAssetId, // solhint-disable-next-line no-unused-vars uint256 _salt, address _admin, - bytes calldata _initData - ) external onlyOwnerOrAdmin nonReentrant whenNotPaused returns (uint256) { - require(_chainId != 0, "Bridgehub: chainId cannot be 0"); - require(_chainId <= type(uint48).max, "Bridgehub: chainId too large"); - - require( - stateTransitionManagerIsRegistered[_stateTransitionManager], - "Bridgehub: state transition not registered" - ); - require(tokenIsRegistered[_baseToken], "Bridgehub: token not 
registered"); - require(address(sharedBridge) != address(0), "Bridgehub: weth bridge not set"); + bytes calldata _initData, + bytes[] calldata _factoryDeps + ) external onlyOwnerOrAdmin nonReentrant whenNotPaused onlyL1 returns (uint256) { + _validateChainParams({_chainId: _chainId, _assetId: _baseTokenAssetId, _chainTypeManager: _chainTypeManager}); - require(stateTransitionManager[_chainId] == address(0), "Bridgehub: chainId already registered"); + chainTypeManager[_chainId] = _chainTypeManager; - stateTransitionManager[_chainId] = _stateTransitionManager; - baseToken[_chainId] = _baseToken; + baseTokenAssetId[_chainId] = _baseTokenAssetId; + settlementLayer[_chainId] = block.chainid; - IStateTransitionManager(_stateTransitionManager).createNewChain({ + address chainAddress = IChainTypeManager(_chainTypeManager).createNewChain({ _chainId: _chainId, - _baseToken: _baseToken, - _sharedBridge: address(sharedBridge), + _baseTokenAssetId: _baseTokenAssetId, _admin: _admin, - _diamondCut: _initData + _initData: _initData, + _factoryDeps: _factoryDeps }); + _registerNewZKChain(_chainId, chainAddress); + messageRoot.addNewChain(_chainId); - emit NewChain(_chainId, _stateTransitionManager, _admin); + emit NewChain(_chainId, _chainTypeManager, _admin); return _chainId; } - //// Mailbox forwarder + /// @dev This internal function is used to register a new zkChain in the system. 
+ function _registerNewZKChain(uint256 _chainId, address _zkChain) internal { + // slither-disable-next-line unused-return + zkChainMap.set(_chainId, _zkChain); + if (zkChainMap.length() > MAX_NUMBER_OF_ZK_CHAINS) { + revert ZKChainLimitReached(); + } + } - /// @notice forwards function call to Mailbox based on ChainId - function proveL2MessageInclusion( - uint256 _chainId, - uint256 _batchNumber, - uint256 _index, - L2Message calldata _message, - bytes32[] calldata _proof - ) external view override returns (bool) { - address hyperchain = getHyperchain(_chainId); - return IZkSyncHyperchain(hyperchain).proveL2MessageInclusion(_batchNumber, _index, _message, _proof); + /*////////////////////////////////////////////////////////////// + Getters + //////////////////////////////////////////////////////////////*/ + + /// @notice baseToken function, which takes chainId as input, reads assetHandler from AR, and tokenAddress from AH + function baseToken(uint256 _chainId) public view returns (address) { + bytes32 baseTokenAssetId = baseTokenAssetId[_chainId]; + address assetHandlerAddress = IAssetRouterBase(assetRouter).assetHandlerAddress(baseTokenAssetId); + + // It is possible that the asset handler is not deployed for a chain on the current layer. + // In this case we throw an error. 
+ if (assetHandlerAddress == address(0)) { + revert AssetHandlerNotRegistered(baseTokenAssetId); + } + return IL1BaseTokenAssetHandler(assetHandlerAddress).tokenAddress(baseTokenAssetId); } - /// @notice forwards function call to Mailbox based on ChainId - function proveL2LogInclusion( - uint256 _chainId, - uint256 _batchNumber, - uint256 _index, - L2Log memory _log, - bytes32[] calldata _proof - ) external view override returns (bool) { - address hyperchain = getHyperchain(_chainId); - return IZkSyncHyperchain(hyperchain).proveL2LogInclusion(_batchNumber, _index, _log, _proof); + /// @notice Returns all the registered zkChain addresses + function getAllZKChains() public view override returns (address[] memory chainAddresses) { + uint256[] memory keys = zkChainMap.keys(); + chainAddresses = new address[](keys.length); + uint256 keysLength = keys.length; + for (uint256 i = 0; i < keysLength; ++i) { + chainAddresses[i] = zkChainMap.get(keys[i]); + } } - /// @notice forwards function call to Mailbox based on ChainId - function proveL1ToL2TransactionStatus( - uint256 _chainId, - bytes32 _l2TxHash, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes32[] calldata _merkleProof, - TxStatus _status - ) external view override returns (bool) { - address hyperchain = getHyperchain(_chainId); - return - IZkSyncHyperchain(hyperchain).proveL1ToL2TransactionStatus({ - _l2TxHash: _l2TxHash, - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _merkleProof: _merkleProof, - _status: _status - }); + /// @notice Returns all the registered zkChain chainIDs + function getAllZKChainChainIDs() public view override returns (uint256[] memory) { + return zkChainMap.keys(); } - /// @notice forwards function call to Mailbox based on ChainId - function l2TransactionBaseCost( - uint256 _chainId, - uint256 _gasPrice, - uint256 _l2GasLimit, - uint256 _l2GasPerPubdataByteLimit - ) external view returns 
(uint256) { - address hyperchain = getHyperchain(_chainId); - return IZkSyncHyperchain(hyperchain).l2TransactionBaseCost(_gasPrice, _l2GasLimit, _l2GasPerPubdataByteLimit); + /// @notice Returns the address of the ZK chain with the corresponding chainID + /// @param _chainId the chainId of the chain + /// @return chainAddress the address of the ZK chain + function getZKChain(uint256 _chainId) public view override returns (address chainAddress) { + // slither-disable-next-line unused-return + (, chainAddress) = zkChainMap.tryGet(_chainId); } - /// @notice the mailbox is called directly after the sharedBridge received the deposit + function ctmAssetIdFromChainId(uint256 _chainId) public view override returns (bytes32) { + address ctmAddress = chainTypeManager[_chainId]; + if (ctmAddress == address(0)) { + revert ChainIdNotRegistered(_chainId); + } + return ctmAssetIdFromAddress[chainTypeManager[_chainId]]; + } + + function calculateCtmAssetId(address _ctmAddress) internal view returns (bytes32) { + return keccak256(abi.encode(L1_CHAIN_ID, address(l1CtmDeployer), bytes32(uint256(uint160(_ctmAddress))))); + } + + /*////////////////////////////////////////////////////////////// + Mailbox forwarder + //////////////////////////////////////////////////////////////*/ + + /// @notice the mailbox is called directly after the assetRouter received the deposit /// this assumes that either ether is the base token or - /// the msg.sender has approved mintValue allowance for the sharedBridge. - /// This means this is not ideal for contract calls, as the contract would have to handle token allowance of the base Token + /// the msg.sender has approved mintValue allowance for the nativeTokenVault. + /// This means this is not ideal for contract calls, as the contract would have to handle token allowance of the base Token. + /// In case allowance is provided to the Shared Bridge, then it will be transferred to NTV. 
function requestL2TransactionDirect( L2TransactionRequestDirect calldata _request - ) external payable override nonReentrant whenNotPaused returns (bytes32 canonicalTxHash) { + ) external payable override nonReentrant whenNotPaused onlyL1 returns (bytes32 canonicalTxHash) { + // Note: If the ZK chain with corresponding `chainId` is not yet created, + // the transaction will revert on `bridgehubRequestL2Transaction` as call to zero address. { - address token = baseToken[_request.chainId]; - if (token == ETH_TOKEN_ADDRESS) { - require(msg.value == _request.mintValue, "Bridgehub: msg.value mismatch 1"); + bytes32 tokenAssetId = baseTokenAssetId[_request.chainId]; + if (tokenAssetId == ETH_TOKEN_ASSET_ID) { + if (msg.value != _request.mintValue) { + revert MsgValueMismatch(_request.mintValue, msg.value); + } } else { - require(msg.value == 0, "Bridgehub: non-eth bridge with msg.value"); + if (msg.value != 0) { + revert MsgValueMismatch(0, msg.value); + } } // slither-disable-next-line arbitrary-send-eth - sharedBridge.bridgehubDepositBaseToken{value: msg.value}( + IL1AssetRouter(assetRouter).bridgehubDepositBaseToken{value: msg.value}( _request.chainId, + tokenAssetId, msg.sender, - token, _request.mintValue ); } - address hyperchain = getHyperchain(_request.chainId); - address refundRecipient = AddressAliasHelper.actualRefundRecipient(_request.refundRecipient, msg.sender); - canonicalTxHash = IZkSyncHyperchain(hyperchain).bridgehubRequestL2Transaction( + canonicalTxHash = _sendRequest( + _request.chainId, + _request.refundRecipient, BridgehubL2TransactionRequest({ sender: msg.sender, contractL2: _request.l2Contract, @@ -245,49 +489,55 @@ contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, Paus l2GasLimit: _request.l2GasLimit, l2GasPerPubdataByteLimit: _request.l2GasPerPubdataByteLimit, factoryDeps: _request.factoryDeps, - refundRecipient: refundRecipient + refundRecipient: address(0) }) ); } - /// @notice After depositing funds to the 
sharedBridge, the secondBridge is called + /// @notice After depositing funds to the assetRouter, the secondBridge is called /// to return the actual L2 message which is sent to the Mailbox. /// This assumes that either ether is the base token or - /// the msg.sender has approved the sharedBridge with the mintValue, + /// the msg.sender has approved the nativeTokenVault with the mintValue, /// and also the necessary approvals are given for the second bridge. + /// In case allowance is provided to the Shared Bridge, then it will be transferred to NTV. /// @notice The logic of this bridge is to allow easy depositing for bridges. /// Each contract that handles the users ERC20 tokens needs approvals from the user, this contract allows /// the user to approve for each token only its respective bridge /// @notice This function is great for contract calls to L2, the secondBridge can be any contract. + /// @param _request the request for the L2 transaction function requestL2TransactionTwoBridges( L2TransactionRequestTwoBridgesOuter calldata _request - ) external payable override nonReentrant whenNotPaused returns (bytes32 canonicalTxHash) { + ) external payable override nonReentrant whenNotPaused onlyL1 returns (bytes32 canonicalTxHash) { + if (_request.secondBridgeAddress <= BRIDGEHUB_MIN_SECOND_BRIDGE_ADDRESS) { + revert AddressTooLow(_request.secondBridgeAddress); + } + { - address token = baseToken[_request.chainId]; + bytes32 tokenAssetId = baseTokenAssetId[_request.chainId]; uint256 baseTokenMsgValue; - if (token == ETH_TOKEN_ADDRESS) { - require( - msg.value == _request.mintValue + _request.secondBridgeValue, - "Bridgehub: msg.value mismatch 2" - ); + if (tokenAssetId == ETH_TOKEN_ASSET_ID) { + if (msg.value != _request.mintValue + _request.secondBridgeValue) { + revert MsgValueMismatch(_request.mintValue + _request.secondBridgeValue, msg.value); + } baseTokenMsgValue = _request.mintValue; } else { - require(msg.value == _request.secondBridgeValue, "Bridgehub: 
msg.value mismatch 3"); + if (msg.value != _request.secondBridgeValue) { + revert MsgValueMismatch(_request.secondBridgeValue, msg.value); + } baseTokenMsgValue = 0; } + // slither-disable-next-line arbitrary-send-eth - sharedBridge.bridgehubDepositBaseToken{value: baseTokenMsgValue}( + IL1AssetRouter(assetRouter).bridgehubDepositBaseToken{value: baseTokenMsgValue}( _request.chainId, + tokenAssetId, msg.sender, - token, _request.mintValue ); } - address hyperchain = getHyperchain(_request.chainId); - // slither-disable-next-line arbitrary-send-eth - L2TransactionRequestTwoBridgesInner memory outputRequest = IL1SharedBridge(_request.secondBridgeAddress) + L2TransactionRequestTwoBridgesInner memory outputRequest = IL1AssetRouter(_request.secondBridgeAddress) .bridgehubDeposit{value: _request.secondBridgeValue}( _request.chainId, msg.sender, @@ -295,15 +545,13 @@ contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, Paus _request.secondBridgeCalldata ); - require(outputRequest.magicValue == TWO_BRIDGES_MAGIC_VALUE, "Bridgehub: magic value mismatch"); - - address refundRecipient = AddressAliasHelper.actualRefundRecipient(_request.refundRecipient, msg.sender); + if (outputRequest.magicValue != TWO_BRIDGES_MAGIC_VALUE) { + revert WrongMagicValue(uint256(TWO_BRIDGES_MAGIC_VALUE), uint256(outputRequest.magicValue)); + } - require( - _request.secondBridgeAddress > BRIDGEHUB_MIN_SECOND_BRIDGE_ADDRESS, - "Bridgehub: second bridge address too low" - ); // to avoid calls to precompiles - canonicalTxHash = IZkSyncHyperchain(hyperchain).bridgehubRequestL2Transaction( + canonicalTxHash = _sendRequest( + _request.chainId, + _request.refundRecipient, BridgehubL2TransactionRequest({ sender: _request.secondBridgeAddress, contractL2: outputRequest.l2Contract, @@ -313,17 +561,311 @@ contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, Paus l2GasLimit: _request.l2GasLimit, l2GasPerPubdataByteLimit: _request.l2GasPerPubdataByteLimit, 
factoryDeps: outputRequest.factoryDeps, - refundRecipient: refundRecipient + refundRecipient: address(0) }) ); - IL1SharedBridge(_request.secondBridgeAddress).bridgehubConfirmL2Transaction( + IL1AssetRouter(_request.secondBridgeAddress).bridgehubConfirmL2Transaction( _request.chainId, outputRequest.txDataHash, canonicalTxHash ); } + /// @notice This function is used to send a request to the ZK chain. + /// @param _chainId the chainId of the chain + /// @param _refundRecipient the refund recipient + /// @param _request the request + /// @return canonicalTxHash the canonical transaction hash + function _sendRequest( + uint256 _chainId, + address _refundRecipient, + BridgehubL2TransactionRequest memory _request + ) internal returns (bytes32 canonicalTxHash) { + address refundRecipient = AddressAliasHelper.actualRefundRecipient(_refundRecipient, msg.sender); + _request.refundRecipient = refundRecipient; + address zkChain = zkChainMap.get(_chainId); + + canonicalTxHash = IZKChain(zkChain).bridgehubRequestL2Transaction(_request); + } + + /// @notice Used to forward a transaction on the gateway to the chains mailbox (from L1). + /// @param _chainId the chainId of the chain + /// @param _canonicalTxHash the canonical transaction hash + /// @param _expirationTimestamp the expiration timestamp for the transaction + function forwardTransactionOnGateway( + uint256 _chainId, + bytes32 _canonicalTxHash, + uint64 _expirationTimestamp + ) external override onlySettlementLayerRelayedSender { + require(L1_CHAIN_ID != block.chainid, "BH: not in sync layer mode"); + address zkChain = zkChainMap.get(_chainId); + IZKChain(zkChain).bridgehubRequestL2TransactionOnGateway(_canonicalTxHash, _expirationTimestamp); + } + + /// @notice forwards function call to Mailbox based on ChainId + /// @param _chainId The chain ID of the ZK chain where to prove L2 message inclusion. 
+ /// @param _batchNumber The executed L2 batch number in which the message appeared + /// @param _index The position in the L2 logs Merkle tree of the l2Log that was sent with the message + /// @param _message Information about the sent message: sender address, the message itself, tx index in the L2 batch where the message was sent + /// @param _proof Merkle proof for inclusion of L2 log that was sent with the message + /// @return Whether the proof is valid + function proveL2MessageInclusion( + uint256 _chainId, + uint256 _batchNumber, + uint256 _index, + L2Message calldata _message, + bytes32[] calldata _proof + ) external view override returns (bool) { + address zkChain = zkChainMap.get(_chainId); + return IZKChain(zkChain).proveL2MessageInclusion(_batchNumber, _index, _message, _proof); + } + + /// @notice forwards function call to Mailbox based on ChainId + /// @param _chainId The chain ID of the ZK chain where to prove L2 log inclusion. + /// @param _batchNumber The executed L2 batch number in which the log appeared + /// @param _index The position of the l2log in the L2 logs Merkle tree + /// @param _log Information about the sent log + /// @param _proof Merkle proof for inclusion of the L2 log + /// @return Whether the proof is correct and L2 log is included in batch + function proveL2LogInclusion( + uint256 _chainId, + uint256 _batchNumber, + uint256 _index, + L2Log calldata _log, + bytes32[] calldata _proof + ) external view override returns (bool) { + address zkChain = zkChainMap.get(_chainId); + return IZKChain(zkChain).proveL2LogInclusion(_batchNumber, _index, _log, _proof); + } + + /// @notice forwards function call to Mailbox based on ChainId + /// @param _chainId The chain ID of the ZK chain where to prove L1->L2 tx status. 
+ /// @param _l2TxHash The L2 canonical transaction hash + /// @param _l2BatchNumber The L2 batch number where the transaction was processed + /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message + /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent + /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction + /// @param _status The execution status of the L1 -> L2 transaction (true - success & 0 - fail) + /// @return Whether the proof is correct and the transaction was actually executed with provided status + /// NOTE: It may return `false` for incorrect proof, but it doesn't mean that the L1 -> L2 transaction has an opposite status! + function proveL1ToL2TransactionStatus( + uint256 _chainId, + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof, + TxStatus _status + ) external view override returns (bool) { + address zkChain = zkChainMap.get(_chainId); + return + IZKChain(zkChain).proveL1ToL2TransactionStatus({ + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _merkleProof, + _status: _status + }); + } + + /// @notice forwards function call to Mailbox based on ChainId + function l2TransactionBaseCost( + uint256 _chainId, + uint256 _gasPrice, + uint256 _l2GasLimit, + uint256 _l2GasPerPubdataByteLimit + ) external view returns (uint256) { + address zkChain = zkChainMap.get(_chainId); + return IZKChain(zkChain).l2TransactionBaseCost(_gasPrice, _l2GasLimit, _l2GasPerPubdataByteLimit); + } + + /*////////////////////////////////////////////////////////////// + Chain migration + //////////////////////////////////////////////////////////////*/ + + /// @notice IL1AssetHandler interface, used to migrate (transfer) a chain to the settlement layer. 
+ /// @param _settlementChainId the chainId of the settlement chain, i.e. where the message and the migrating chain is sent. + /// @param _assetId the assetId of the migrating chain's CTM + /// @param _originalCaller the message sender initiated a set of calls that leads to bridge burn + /// @param _data the data for the migration + function bridgeBurn( + uint256 _settlementChainId, + uint256, // msgValue + bytes32 _assetId, + address _originalCaller, + bytes calldata _data + ) external payable override onlyAssetRouter whenMigrationsNotPaused returns (bytes memory bridgehubMintData) { + require(whitelistedSettlementLayers[_settlementChainId], "BH: SL not whitelisted"); + + BridgehubBurnCTMAssetData memory bridgehubData = abi.decode(_data, (BridgehubBurnCTMAssetData)); + require(_assetId == ctmAssetIdFromChainId(bridgehubData.chainId), "BH: assetInfo 1"); + require(settlementLayer[bridgehubData.chainId] == block.chainid, "BH: not current SL"); + settlementLayer[bridgehubData.chainId] = _settlementChainId; + + address zkChain = zkChainMap.get(bridgehubData.chainId); + require(zkChain != address(0), "BH: zkChain not registered"); + require(_originalCaller == IZKChain(zkChain).getAdmin(), "BH: incorrect sender"); + + bytes memory ctmMintData = IChainTypeManager(chainTypeManager[bridgehubData.chainId]).forwardedBridgeBurn( + bridgehubData.chainId, + bridgehubData.ctmData + ); + bytes memory chainMintData = IZKChain(zkChain).forwardedBridgeBurn( + _settlementChainId == L1_CHAIN_ID + ? 
L1_SETTLEMENT_LAYER_VIRTUAL_ADDRESS + : zkChainMap.get(_settlementChainId), + _originalCaller, + bridgehubData.chainData + ); + BridgehubMintCTMAssetData memory bridgeMintStruct = BridgehubMintCTMAssetData({ + chainId: bridgehubData.chainId, + baseTokenAssetId: baseTokenAssetId[bridgehubData.chainId], + ctmData: ctmMintData, + chainData: chainMintData + }); + bridgehubMintData = abi.encode(bridgeMintStruct); + + emit MigrationStarted(bridgehubData.chainId, _assetId, _settlementChainId); + } + + /// @dev IL1AssetHandler interface, used to receive a chain on the settlement layer. + /// @param _assetId the assetId of the chain's STM + /// @param _bridgehubMintData the data for the mint + function bridgeMint( + uint256, // originChainId + bytes32 _assetId, + bytes calldata _bridgehubMintData + ) external payable override onlyAssetRouter whenMigrationsNotPaused { + BridgehubMintCTMAssetData memory bridgehubData = abi.decode(_bridgehubMintData, (BridgehubMintCTMAssetData)); + + address ctm = ctmAssetIdToAddress[_assetId]; + require(ctm != address(0), "BH: assetInfo 2"); + require(settlementLayer[bridgehubData.chainId] != block.chainid, "BH: already current SL"); + + settlementLayer[bridgehubData.chainId] = block.chainid; + chainTypeManager[bridgehubData.chainId] = ctm; + baseTokenAssetId[bridgehubData.chainId] = bridgehubData.baseTokenAssetId; + // To keep `assetIdIsRegistered` consistent, we'll also automatically register the base token. + // It is assumed that if the bridging happened, the token was approved on L1 already. 
+ assetIdIsRegistered[bridgehubData.baseTokenAssetId] = true; + + address zkChain = getZKChain(bridgehubData.chainId); + bool contractAlreadyDeployed = zkChain != address(0); + if (!contractAlreadyDeployed) { + zkChain = IChainTypeManager(ctm).forwardedBridgeMint(bridgehubData.chainId, bridgehubData.ctmData); + require(zkChain != address(0), "BH: chain not registered"); + _registerNewZKChain(bridgehubData.chainId, zkChain); + messageRoot.addNewChain(bridgehubData.chainId); + } + + IZKChain(zkChain).forwardedBridgeMint(bridgehubData.chainData, contractAlreadyDeployed); + + emit MigrationFinalized(bridgehubData.chainId, _assetId, zkChain); + } + + /// @dev IL1AssetHandler interface, used to undo a failed migration of a chain. + // / @param _chainId the chainId of the chain + /// @param _assetId the assetId of the chain's CTM + /// @param _data the data for the recovery. + function bridgeRecoverFailedTransfer( + uint256, + bytes32 _assetId, + address _depositSender, + bytes calldata _data + ) external payable override onlyAssetRouter onlyL1 { + BridgehubBurnCTMAssetData memory bridgehubData = abi.decode(_data, (BridgehubBurnCTMAssetData)); + + delete settlementLayer[bridgehubData.chainId]; + + IChainTypeManager(chainTypeManager[bridgehubData.chainId]).forwardedBridgeRecoverFailedTransfer({ + _chainId: bridgehubData.chainId, + _assetInfo: _assetId, + _depositSender: _depositSender, + _ctmData: bridgehubData.ctmData + }); + + IZKChain(getZKChain(bridgehubData.chainId)).forwardedBridgeRecoverFailedTransfer({ + _chainId: bridgehubData.chainId, + _assetInfo: _assetId, + _originalCaller: _depositSender, + _chainData: bridgehubData.chainData + }); + } + + /// @dev Registers an already deployed chain with the bridgehub + /// @param _chainId The chain Id of the chain + /// @param _zkChain Address of the zkChain + function registerAlreadyDeployedZKChain(uint256 _chainId, address _zkChain) external onlyOwner onlyL1 { + if (_zkChain == address(0)) { + revert ZeroAddress(); + } + 
if (zkChainMap.contains(_chainId)) { + revert ChainIdAlreadyExists(); + } + if (IZKChain(_zkChain).getChainId() != _chainId) { + revert ChainIdMismatch(); + } + + address ctm = IZKChain(_zkChain).getChainTypeManager(); + address chainAdmin = IZKChain(_zkChain).getAdmin(); + bytes32 chainBaseTokenAssetId = IZKChain(_zkChain).getBaseTokenAssetId(); + address bridgeHub = IZKChain(_zkChain).getBridgehub(); + + if (bridgeHub != address(this)) { + revert IncorrectBridgeHubAddress(bridgeHub); + } + + _validateChainParams({_chainId: _chainId, _assetId: chainBaseTokenAssetId, _chainTypeManager: ctm}); + + chainTypeManager[_chainId] = ctm; + + baseTokenAssetId[_chainId] = chainBaseTokenAssetId; + settlementLayer[_chainId] = block.chainid; + + _registerNewZKChain(_chainId, _zkChain); + messageRoot.addNewChain(_chainId); + + emit NewChain(_chainId, ctm, chainAdmin); + } + + function _validateChainParams(uint256 _chainId, bytes32 _assetId, address _chainTypeManager) internal view { + if (_chainId == 0) { + revert ZeroChainId(); + } + + if (_chainId > type(uint48).max) { + revert ChainIdTooBig(); + } + + if (_chainId == block.chainid) { + revert ChainIdCantBeCurrentChain(); + } + + if (_chainTypeManager == address(0)) { + revert ZeroAddress(); + } + if (_assetId == bytes32(0)) { + revert EmptyAssetId(); + } + + if (!chainTypeManagerIsRegistered[_chainTypeManager]) { + revert CTMNotRegistered(); + } + + if (!assetIdIsRegistered[_assetId]) { + revert AssetIdNotSupported(_assetId); + } + + if (assetRouter == address(0)) { + revert SharedBridgeNotSet(); + } + if (chainTypeManager[_chainId] != address(0)) { + revert BridgeHubAlreadyRegistered(); + } + } + /*////////////////////////////////////////////////////////////// PAUSE //////////////////////////////////////////////////////////////*/ @@ -337,4 +879,28 @@ contract Bridgehub is IBridgehub, ReentrancyGuard, Ownable2StepUpgradeable, Paus function unpause() external onlyOwner { _unpause(); } + + /// @notice Pauses migration 
functions. + function pauseMigration() external onlyOwner { + migrationPaused = true; + } + + /// @notice Unpauses migration functions. + function unpauseMigration() external onlyOwner { + migrationPaused = false; + } + + /*////////////////////////////////////////////////////////////// + Legacy functions + //////////////////////////////////////////////////////////////*/ + + /// @notice return the ZK chain contract for a chainId + function getHyperchain(uint256 _chainId) public view returns (address) { + return getZKChain(_chainId); + } + + /// @notice return the asset router + function sharedBridge() public view returns (address) { + return assetRouter; + } } diff --git a/l1-contracts/contracts/bridgehub/CTMDeploymentTracker.sol b/l1-contracts/contracts/bridgehub/CTMDeploymentTracker.sol new file mode 100644 index 000000000..b82ad213b --- /dev/null +++ b/l1-contracts/contracts/bridgehub/CTMDeploymentTracker.sol @@ -0,0 +1,147 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable reason-string, gas-custom-errors + +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/security/PausableUpgradeable.sol"; + +import {IBridgehub, L2TransactionRequestTwoBridgesInner} from "./IBridgehub.sol"; +import {ICTMDeploymentTracker} from "./ICTMDeploymentTracker.sol"; + +import {IAssetRouterBase} from "../bridge/asset-router/IAssetRouterBase.sol"; +import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; +import {TWO_BRIDGES_MAGIC_VALUE} from "../common/Config.sol"; +import {L2_BRIDGEHUB_ADDR} from "../common/L2ContractAddresses.sol"; + +/// @dev The encoding version of the data. 
+bytes1 constant CTM_DEPLOYMENT_TRACKER_ENCODING_VERSION = 0x01; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev Contract to be deployed on L1, can link together other contracts based on AssetInfo. +contract CTMDeploymentTracker is ICTMDeploymentTracker, ReentrancyGuard, Ownable2StepUpgradeable, PausableUpgradeable { + /// @dev Bridgehub smart contract that is used to operate with L2 via asynchronous L2 <-> L1 communication. + IBridgehub public immutable override BRIDGE_HUB; + + /// @dev Bridgehub smart contract that is used to operate with L2 via asynchronous L2 <-> L1 communication. + IAssetRouterBase public immutable override L1_ASSET_ROUTER; + + /// @notice Checks that the message sender is the bridgehub. + modifier onlyBridgehub() { + // solhint-disable-next-line gas-custom-errors + require(msg.sender == address(BRIDGE_HUB), "CTM DT: not BH"); + _; + } + + /// @notice Checks that the message sender is the bridgehub. + modifier onlyOwnerViaRouter(address _originalCaller) { + // solhint-disable-next-line gas-custom-errors + require(msg.sender == address(L1_ASSET_ROUTER) && _originalCaller == owner(), "CTM DT: not owner via router"); + _; + } + + /// @dev Contract is expected to be used as proxy implementation on L1. + /// @dev Initialize the implementation to prevent Parity hack. + constructor(IBridgehub _bridgehub, IAssetRouterBase _sharedBridge) reentrancyGuardInitializer { + _disableInitializers(); + BRIDGE_HUB = _bridgehub; + L1_ASSET_ROUTER = _sharedBridge; + } + + /// @notice used to initialize the contract + /// @param _owner the owner of the contract + function initialize(address _owner) external reentrancyGuardInitializer { + _transferOwnership(_owner); + } + + /// @notice Used to register the ctm asset in L1 contracts, AssetRouter and Bridgehub. 
+ /// @param _ctmAddress the address of the ctm asset + function registerCTMAssetOnL1(address _ctmAddress) external onlyOwner { + // solhint-disable-next-line gas-custom-errors + + require(BRIDGE_HUB.chainTypeManagerIsRegistered(_ctmAddress), "CTMDT: ctm not registered"); + L1_ASSET_ROUTER.setAssetHandlerAddressThisChain(bytes32(uint256(uint160(_ctmAddress))), address(BRIDGE_HUB)); + BRIDGE_HUB.setAssetHandlerAddress(bytes32(uint256(uint160(_ctmAddress))), _ctmAddress); + } + + /// @notice The function responsible for registering the L2 counterpart of an CTM asset on the L2 Bridgehub. + /// @dev The function is called by the Bridgehub contract during the `Bridgehub.requestL2TransactionTwoBridges`. + /// @dev Since the L2 settlement layers `_chainId` might potentially have ERC20 tokens as native assets, + /// there are two ways to perform the L1->L2 transaction: + /// - via the `Bridgehub.requestL2TransactionDirect`. However, this would require the CTMDeploymentTracker to + /// handle the ERC20 balances to be used in the transaction. + /// - via the `Bridgehub.requestL2TransactionTwoBridges`. This way it will be the sender that provides the funds + /// for the L2 transaction. + /// The second approach is used due to its simplicity even though it gives the sender slightly more control over the call: + /// `gasLimit`, etc. 
+ /// @param _chainId the chainId of the chain + /// @param _originalCaller the previous message sender + /// @param _data the data of the transaction + // slither-disable-next-line locked-ether + function bridgehubDeposit( + uint256 _chainId, + address _originalCaller, + uint256, + bytes calldata _data + ) external payable onlyBridgehub returns (L2TransactionRequestTwoBridgesInner memory request) { + // solhint-disable-next-line gas-custom-errors + + require(msg.value == 0, "CTMDT: no eth allowed"); + // solhint-disable-next-line gas-custom-errors + + require(_originalCaller == owner(), "CTMDT: not owner"); + bytes1 encodingVersion = _data[0]; + require(encodingVersion == CTM_DEPLOYMENT_TRACKER_ENCODING_VERSION, "CTMDT: wrong encoding version"); + (address _ctmL1Address, address _ctmL2Address) = abi.decode(_data[1:], (address, address)); + + request = _registerCTMAssetOnL2Bridgehub(_chainId, _ctmL1Address, _ctmL2Address); + } + + /// @notice The function called by the Bridgehub after the L2 transaction has been initiated. + /// @dev Not used in this contract. In case the transaction fails, we can just re-try it. + function bridgehubConfirmL2Transaction(uint256 _chainId, bytes32 _txDataHash, bytes32 _txHash) external {} + + /// @notice Used to register the ctm asset in L2 AssetRouter. + /// @param _originalCaller the address that called the Router + /// @param _assetHandlerAddressOnCounterpart the address of the asset handler on the counterpart chain. 
+ function bridgeCheckCounterpartAddress( + uint256, + bytes32, + address _originalCaller, + address _assetHandlerAddressOnCounterpart + ) external view override onlyOwnerViaRouter(_originalCaller) { + require(_assetHandlerAddressOnCounterpart == L2_BRIDGEHUB_ADDR, "CTMDT: wrong counter part"); + } + + function calculateAssetId(address _l1CTM) public view override returns (bytes32) { + return keccak256(abi.encode(block.chainid, address(this), bytes32(uint256(uint160(_l1CTM))))); + } + + /// @notice Used to register the ctm asset in L2 Bridgehub. + /// @param _chainId the chainId of the chain + function _registerCTMAssetOnL2Bridgehub( + // solhint-disable-next-line no-unused-vars + uint256 _chainId, + address _ctmL1Address, + address _ctmL2Address + ) internal pure returns (L2TransactionRequestTwoBridgesInner memory request) { + bytes memory l2TxCalldata = abi.encodeCall( + IBridgehub.setAssetHandlerAddress, + (bytes32(uint256(uint160(_ctmL1Address))), _ctmL2Address) + ); + + request = L2TransactionRequestTwoBridgesInner({ + magicValue: TWO_BRIDGES_MAGIC_VALUE, + l2Contract: L2_BRIDGEHUB_ADDR, + l2Calldata: l2TxCalldata, + factoryDeps: new bytes[](0), + // The `txDataHash` is typically used in usual ERC20 bridges to commit to the transaction data + // so that the user can recover funds in case the bridging fails on L2. + // However, this contract uses the `requestL2TransactionTwoBridges` method just to perform an L1->L2 transaction. + // We do not need to recover anything and so `bytes32(0)` here is okay. 
+ txDataHash: bytes32(0) + }); + } +} diff --git a/l1-contracts/contracts/bridgehub/IBridgehub.sol b/l1-contracts/contracts/bridgehub/IBridgehub.sol index 1204bc1d4..3f05bba35 100644 --- a/l1-contracts/contracts/bridgehub/IBridgehub.sol +++ b/l1-contracts/contracts/bridgehub/IBridgehub.sol @@ -1,9 +1,12 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {IL1SharedBridge} from "../bridge/interfaces/IL1SharedBridge.sol"; import {L2Message, L2Log, TxStatus} from "../common/Messaging.sol"; +import {IL1AssetHandler} from "../bridge/interfaces/IL1AssetHandler.sol"; +import {ICTMDeploymentTracker} from "./ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "./IMessageRoot.sol"; +import {IAssetHandler} from "../bridge/interfaces/IAssetHandler.sol"; struct L2TransactionRequestDirect { uint256 chainId; @@ -37,7 +40,22 @@ struct L2TransactionRequestTwoBridgesInner { bytes32 txDataHash; } -interface IBridgehub { +struct BridgehubMintCTMAssetData { + uint256 chainId; + bytes32 baseTokenAssetId; + bytes ctmData; + bytes chainData; +} + +struct BridgehubBurnCTMAssetData { + uint256 chainId; + bytes ctmData; + bytes chainData; +} + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IBridgehub is IAssetHandler, IL1AssetHandler { /// @notice pendingAdmin is changed /// @dev Also emitted when new admin is accepted and in this case, `newPendingAdmin` would be zero address event NewPendingAdmin(address indexed oldPendingAdmin, address indexed newPendingAdmin); @@ -45,7 +63,29 @@ interface IBridgehub { /// @notice Admin changed event NewAdmin(address indexed oldAdmin, address indexed newAdmin); - /// @notice Starts the transfer of admin rights. Only the current admin can propose a new pending one. 
+ /// @notice CTM asset registered + event AssetRegistered( + bytes32 indexed assetInfo, + address indexed _assetAddress, + bytes32 indexed additionalData, + address sender + ); + + event SettlementLayerRegistered(uint256 indexed chainId, bool indexed isWhitelisted); + + /// @notice Emitted when the bridging to the chain is started. + /// @param chainId Chain ID of the ZK chain + /// @param assetId Asset ID of the token for the zkChain's CTM + /// @param settlementLayerChainId The chain id of the settlement layer the chain migrates to. + event MigrationStarted(uint256 indexed chainId, bytes32 indexed assetId, uint256 indexed settlementLayerChainId); + + /// @notice Emitted when the bridging to the chain is complete. + /// @param chainId Chain ID of the ZK chain + /// @param assetId Asset ID of the token for the zkChain's CTM + /// @param zkChain The address of the ZK chain on the chain where it is migrated to. + event MigrationFinalized(uint256 indexed chainId, bytes32 indexed assetId, address indexed zkChain); + + /// @notice Starts the transfer of admin rights. Only the current admin or owner can propose a new pending one. /// @notice New admin can accept admin rights by calling `acceptAdmin` function. 
/// @param _newPendingAdmin Address of the new admin function setPendingAdmin(address _newPendingAdmin) external; @@ -54,17 +94,29 @@ interface IBridgehub { function acceptAdmin() external; /// Getters - function stateTransitionManagerIsRegistered(address _stateTransitionManager) external view returns (bool); + function chainTypeManagerIsRegistered(address _chainTypeManager) external view returns (bool); - function stateTransitionManager(uint256 _chainId) external view returns (address); + function chainTypeManager(uint256 _chainId) external view returns (address); - function tokenIsRegistered(address _baseToken) external view returns (bool); + function assetIdIsRegistered(bytes32 _baseTokenAssetId) external view returns (bool); function baseToken(uint256 _chainId) external view returns (address); - function sharedBridge() external view returns (IL1SharedBridge); + function baseTokenAssetId(uint256 _chainId) external view returns (bytes32); - function getHyperchain(uint256 _chainId) external view returns (address); + function sharedBridge() external view returns (address); + + function messageRoot() external view returns (IMessageRoot); + + function getZKChain(uint256 _chainId) external view returns (address); + + function getAllZKChains() external view returns (address[] memory); + + function getAllZKChainChainIDs() external view returns (uint256[] memory); + + function migrationPaused() external view returns (bool); + + function admin() external view returns (address); /// Mailbox forwarder @@ -113,20 +165,75 @@ interface IBridgehub { function createNewChain( uint256 _chainId, - address _stateTransitionManager, - address _baseToken, + address _chainTypeManager, + bytes32 _baseTokenAssetId, uint256 _salt, address _admin, - bytes calldata _initData + bytes calldata _initData, + bytes[] calldata _factoryDeps ) external returns (uint256 chainId); - function addStateTransitionManager(address _stateTransitionManager) external; + function addChainTypeManager(address 
_chainTypeManager) external; + + function removeChainTypeManager(address _chainTypeManager) external; + + function addTokenAssetId(bytes32 _baseTokenAssetId) external; + + function setAddresses( + address _sharedBridge, + ICTMDeploymentTracker _l1CtmDeployer, + IMessageRoot _messageRoot + ) external; - function removeStateTransitionManager(address _stateTransitionManager) external; + event NewChain(uint256 indexed chainId, address chainTypeManager, address indexed chainGovernance); - function addToken(address _token) external; + event ChainTypeManagerAdded(address indexed chainTypeManager); - function setSharedBridge(address _sharedBridge) external; + event ChainTypeManagerRemoved(address indexed chainTypeManager); - event NewChain(uint256 indexed chainId, address stateTransitionManager, address indexed chainGovernance); + event BaseTokenAssetIdRegistered(bytes32 indexed assetId); + + function whitelistedSettlementLayers(uint256 _chainId) external view returns (bool); + + function registerSettlementLayer(uint256 _newSettlementLayerChainId, bool _isWhitelisted) external; + + function settlementLayer(uint256 _chainId) external view returns (uint256); + + // function finalizeMigrationToGateway( + // uint256 _chainId, + // address _baseToken, + // address _sharedBridge, + // address _admin, + // uint256 _expectedProtocolVersion, + // ZKChainCommitment calldata _commitment, + // bytes calldata _diamondCut + // ) external; + + function forwardTransactionOnGateway( + uint256 _chainId, + bytes32 _canonicalTxHash, + uint64 _expirationTimestamp + ) external; + + function ctmAssetIdFromChainId(uint256 _chainId) external view returns (bytes32); + + function ctmAssetIdFromAddress(address _ctmAddress) external view returns (bytes32); + + function l1CtmDeployer() external view returns (ICTMDeploymentTracker); + + function ctmAssetIdToAddress(bytes32 _assetInfo) external view returns (address); + + function setAssetHandlerAddress(bytes32 _additionalData, address _assetAddress) 
external; + + function L1_CHAIN_ID() external view returns (uint256); + + function setLegacyBaseTokenAssetId(uint256 _chainId) external; + + function registerAlreadyDeployedZKChain(uint256 _chainId, address _hyperchain) external; + + function setLegacyChainAddress(uint256 _chainId) external; + + /// @notice return the ZK chain contract for a chainId + /// @dev It is a legacy method. Do not use! + function getHyperchain(uint256 _chainId) external view returns (address); } diff --git a/l1-contracts/contracts/bridgehub/ICTMDeploymentTracker.sol b/l1-contracts/contracts/bridgehub/ICTMDeploymentTracker.sol new file mode 100644 index 000000000..1b7558f29 --- /dev/null +++ b/l1-contracts/contracts/bridgehub/ICTMDeploymentTracker.sol @@ -0,0 +1,26 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {L2TransactionRequestTwoBridgesInner, IBridgehub} from "./IBridgehub.sol"; +import {IAssetRouterBase} from "../bridge/asset-router/IAssetRouterBase.sol"; +import {IL1AssetDeploymentTracker} from "../bridge/interfaces/IL1AssetDeploymentTracker.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface ICTMDeploymentTracker is IL1AssetDeploymentTracker { + function bridgehubDeposit( + uint256 _chainId, + address _originalCaller, + uint256 _l2Value, + bytes calldata _data + ) external payable returns (L2TransactionRequestTwoBridgesInner memory request); + + function BRIDGE_HUB() external view returns (IBridgehub); + + function L1_ASSET_ROUTER() external view returns (IAssetRouterBase); + + function registerCTMAssetOnL1(address _ctmAddress) external; + + function calculateAssetId(address _l1CTM) external view returns (bytes32); +} diff --git a/l1-contracts/contracts/bridgehub/IMessageRoot.sol b/l1-contracts/contracts/bridgehub/IMessageRoot.sol new file mode 100644 index 000000000..2e15e6f63 --- /dev/null +++ b/l1-contracts/contracts/bridgehub/IMessageRoot.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT + +pragma 
solidity 0.8.24; + +import {IBridgehub} from "./IBridgehub.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IMessageRoot { + function BRIDGE_HUB() external view returns (IBridgehub); + + function addNewChain(uint256 _chainId) external; + + function addChainBatchRoot(uint256 _chainId, uint256 _batchNumber, bytes32 _chainBatchRoot) external; +} diff --git a/l1-contracts/contracts/bridgehub/MessageRoot.sol b/l1-contracts/contracts/bridgehub/MessageRoot.sol new file mode 100644 index 000000000..3d81b990f --- /dev/null +++ b/l1-contracts/contracts/bridgehub/MessageRoot.sol @@ -0,0 +1,162 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable reason-string, gas-custom-errors + +import {DynamicIncrementalMerkle} from "../common/libraries/DynamicIncrementalMerkle.sol"; + +import {IBridgehub} from "./IBridgehub.sol"; +import {IMessageRoot} from "./IMessageRoot.sol"; +import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; + +import {FullMerkle} from "../common/libraries/FullMerkle.sol"; + +import {MessageHashing} from "../common/libraries/MessageHashing.sol"; + +import {MAX_NUMBER_OF_ZK_CHAINS} from "../common/Config.sol"; + +// Chain tree consists of batch commitments as their leaves. We use hash of "new bytes(96)" as the hash of an empty leaf. +bytes32 constant CHAIN_TREE_EMPTY_ENTRY_HASH = bytes32( + 0x46700b4d40ac5c35af2c22dda2787a91eb567b06c924a8fb8ae9a05b20c08c21 +); + +// Chain tree consists of batch commitments as their leaves. We use hash of "new bytes(96)" as the hash of an empty leaf. +bytes32 constant SHARED_ROOT_TREE_EMPTY_HASH = bytes32( + 0x46700b4d40ac5c35af2c22dda2787a91eb567b06c924a8fb8ae9a05b20c08c21 +); + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev The MessageRoot contract is responsible for storing the cross message roots of the chains and the aggregated root of all chains. 
+contract MessageRoot is IMessageRoot, ReentrancyGuard { + using FullMerkle for FullMerkle.FullTree; + using DynamicIncrementalMerkle for DynamicIncrementalMerkle.Bytes32PushTree; + + event AddedChain(uint256 indexed chainId, uint256 indexed chainIndex); + + event AppendedChainBatchRoot(uint256 indexed chainId, uint256 indexed batchNumber, bytes32 batchRoot); + + event Preimage(bytes32 one, bytes32 two); + + /// @dev Bridgehub smart contract that is used to operate with L2 via asynchronous L2 <-> L1 communication. + IBridgehub public immutable override BRIDGE_HUB; + + /// @notice The number of chains that are registered. + uint256 public chainCount; + + /// @notice The mapping from chainId to chainIndex. Note index 0 is maintained for the chain the contract is on. + mapping(uint256 chainId => uint256 chainIndex) public chainIndex; + + /// @notice The mapping from chainIndex to chainId. + mapping(uint256 chainIndex => uint256 chainId) public chainIndexToId; + + /// @notice The shared full merkle tree storing the aggregate hash. + FullMerkle.FullTree public sharedTree; + + /// @dev The incremental merkle tree storing the chain message roots. + mapping(uint256 chainId => DynamicIncrementalMerkle.Bytes32PushTree tree) internal chainTree; + + /// @notice only the bridgehub can call + modifier onlyBridgehub() { + require(msg.sender == address(BRIDGE_HUB), "MR: only bridgehub"); + _; + } + + /// @notice only the bridgehub can call + /// @param _chainId the chainId of the chain + modifier onlyChain(uint256 _chainId) { + require(msg.sender == BRIDGE_HUB.getZKChain(_chainId), "MR: only chain"); + _; + } + + /// @dev Contract is expected to be used as proxy implementation on L1, but as a system contract on L2. + /// This means we call the _initialize in both the constructor and the initialize functions. + /// @dev Initialize the implementation to prevent Parity hack. 
+ constructor(IBridgehub _bridgehub) reentrancyGuardInitializer { + BRIDGE_HUB = _bridgehub; + _initialize(); + } + + /// @dev Initializes a contract for later use. Expected to be used in the proxy on L1, on L2 it is a system contract without a proxy. + function initialize() external reentrancyGuardInitializer { + _initialize(); + } + + function addNewChain(uint256 _chainId) external onlyBridgehub { + require(!chainRegistered(_chainId), "MR: chain exists"); + _addNewChain(_chainId); + } + + function chainRegistered(uint256 _chainId) public view returns (bool) { + return (_chainId == block.chainid || chainIndex[_chainId] != 0); + } + + /// @dev add a new chainBatchRoot to the chainTree + function addChainBatchRoot( + uint256 _chainId, + uint256 _batchNumber, + bytes32 _chainBatchRoot + ) external onlyChain(_chainId) { + require(chainRegistered(_chainId), "MR: not registered"); + bytes32 chainRoot; + // slither-disable-next-line unused-return + (, chainRoot) = chainTree[_chainId].push(MessageHashing.batchLeafHash(_chainBatchRoot, _batchNumber)); + + // slither-disable-next-line unused-return + sharedTree.updateLeaf(chainIndex[_chainId], MessageHashing.chainIdLeafHash(chainRoot, _chainId)); + + emit Preimage(chainRoot, MessageHashing.chainIdLeafHash(chainRoot, _chainId)); + + emit AppendedChainBatchRoot(_chainId, _batchNumber, _chainBatchRoot); + } + + /// @dev Gets the aggregated root of all chains. + function getAggregatedRoot() external view returns (bytes32) { + if (chainCount == 0) { + return SHARED_ROOT_TREE_EMPTY_HASH; + } + return sharedTree.root(); + } + + /// @dev Gets the message root of a single chain. 
+ /// @param _chainId the chainId of the chain + function getChainRoot(uint256 _chainId) external view returns (bytes32) { + return chainTree[_chainId].root(); + } + + function updateFullTree() public { + uint256 cachedChainCount = chainCount; + bytes32[] memory newLeaves = new bytes32[](cachedChainCount); + for (uint256 i = 0; i < cachedChainCount; ++i) { + newLeaves[i] = MessageHashing.chainIdLeafHash(chainTree[chainIndexToId[i]].root(), chainIndexToId[i]); + } + // slither-disable-next-line unused-return + sharedTree.updateAllLeaves(newLeaves); + } + + function _initialize() internal { + // slither-disable-next-line unused-return + sharedTree.setup(SHARED_ROOT_TREE_EMPTY_HASH); + _addNewChain(block.chainid); + } + + /// @dev Adds a single chain to the message root. + /// @param _chainId the chainId of the chain + function _addNewChain(uint256 _chainId) internal { + uint256 cachedChainCount = chainCount; + require(cachedChainCount < MAX_NUMBER_OF_ZK_CHAINS, "MR: too many chains"); + + ++chainCount; + chainIndex[_chainId] = cachedChainCount; + chainIndexToId[cachedChainCount] = _chainId; + + // slither-disable-next-line unused-return + bytes32 initialHash = chainTree[_chainId].setup(CHAIN_TREE_EMPTY_ENTRY_HASH); + + // slither-disable-next-line unused-return + sharedTree.pushNewLeaf(MessageHashing.chainIdLeafHash(initialHash, _chainId)); + + emit AddedChain(_chainId, cachedChainCount); + } +} diff --git a/l1-contracts/contracts/common/Config.sol b/l1-contracts/contracts/common/Config.sol index 72ca11aa1..a1e58f464 100644 --- a/l1-contracts/contracts/common/Config.sol +++ b/l1-contracts/contracts/common/Config.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; /// @dev `keccak256("")` bytes32 constant EMPTY_STRING_KECCAK = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470; @@ -21,10 +21,10 @@ bytes32 constant L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH = 0x72abee45b59e344af8a6e5202 // TODO: change constant to the real root hash of empty Merkle tree (SMA-184) bytes32 constant DEFAULT_L2_LOGS_TREE_ROOT_HASH = bytes32(0); -/// @dev Denotes the type of the zkSync transaction that came from L1. +/// @dev Denotes the type of the ZKsync transaction that came from L1. uint256 constant PRIORITY_OPERATION_L2_TX_TYPE = 255; -/// @dev Denotes the type of the zkSync transaction that is used for system upgrades. +/// @dev Denotes the type of the ZKsync transaction that is used for system upgrades. uint256 constant SYSTEM_UPGRADE_L2_TX_TYPE = 254; /// @dev The maximal allowed difference between protocol minor versions in an upgrade. The 100 gap is needed @@ -102,9 +102,57 @@ uint256 constant MEMORY_OVERHEAD_GAS = 10; /// @dev The maximum gas limit for a priority transaction in L2. uint256 constant PRIORITY_TX_MAX_GAS_LIMIT = 72_000_000; +/// @dev the address used to identify eth as the base token for chains. address constant ETH_TOKEN_ADDRESS = address(1); +/// @dev the value returned in bridgehubDeposit in the TwoBridges function. bytes32 constant TWO_BRIDGES_MAGIC_VALUE = bytes32(uint256(keccak256("TWO_BRIDGES_MAGIC_VALUE")) - 1); /// @dev https://eips.ethereum.org/EIPS/eip-1352 address constant BRIDGEHUB_MIN_SECOND_BRIDGE_ADDRESS = address(uint160(type(uint16).max)); + +/// @dev the maximum number of supported chains, this is an arbitrary limit. +uint256 constant MAX_NUMBER_OF_ZK_CHAINS = 100; + +/// @dev Used as the `msg.sender` for transactions that relayed via a settlement layer. 
+address constant SETTLEMENT_LAYER_RELAY_SENDER = address(uint160(0x1111111111111111111111111111111111111111)); + +/// @dev The metadata version that is supported by the ZK Chains to prove that an L2->L1 log was included in a batch. +uint256 constant SUPPORTED_PROOF_METADATA_VERSION = 1; + +/// @dev The virtual address of the L1 settlement layer. +address constant L1_SETTLEMENT_LAYER_VIRTUAL_ADDRESS = address( + uint160(uint256(keccak256("L1_SETTLEMENT_LAYER_VIRTUAL_ADDRESS")) - 1) +); + +struct PriorityTreeCommitment { + uint256 nextLeafIndex; + uint256 startIndex; + uint256 unprocessedIndex; + bytes32[] sides; +} + +// Info that allows to restore a chain. +struct ZKChainCommitment { + /// @notice Total number of executed batches i.e. batches[totalBatchesExecuted] points at the latest executed batch + /// (batch 0 is genesis) + uint256 totalBatchesExecuted; + /// @notice Total number of proved batches i.e. batches[totalBatchesProved] points at the latest proved batch + uint256 totalBatchesVerified; + /// @notice Total number of committed batches i.e. batches[totalBatchesCommitted] points at the latest committed + /// batch + uint256 totalBatchesCommitted; + /// @notice The hash of the L2 system contracts upgrade transaction. + /// @dev It is non zero if the migration happens while the upgrade is not yet finalized. + bytes32 l2SystemContractsUpgradeTxHash; + /// @notice The batch when the system contracts upgrade transaction was executed. + /// @dev It is non-zero if the migration happens while the batch where the upgrade tx was present + /// has not been finalized (executed) yet. + uint256 l2SystemContractsUpgradeBatchNumber; + /// @notice The hashes of the batches that are needed to keep the blockchain working. + /// @dev The length of the array is equal to the `totalBatchesCommitted - totalBatchesExecuted + 1`, i.e. we need + /// to store all the unexecuted batches' hashes + 1 latest executed one.
+ bytes32[] batchHashes; + /// @notice Commitment to the priority merkle tree. + PriorityTreeCommitment priorityTree; +} diff --git a/l1-contracts/contracts/common/Dependencies.sol b/l1-contracts/contracts/common/Dependencies.sol index 6c4d46f2e..fceaa77dd 100644 --- a/l1-contracts/contracts/common/Dependencies.sol +++ b/l1-contracts/contracts/common/Dependencies.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /* solhint-disable-next-line no-unused-import */ -import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; /* solhint-disable-next-line no-unused-import */ -import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol"; +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; diff --git a/l1-contracts/contracts/common/L1ContractErrors.sol b/l1-contracts/contracts/common/L1ContractErrors.sol new file mode 100644 index 000000000..ab1320968 --- /dev/null +++ b/l1-contracts/contracts/common/L1ContractErrors.sol @@ -0,0 +1,450 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.21; + +// 0x5ecf2d7a +error AccessToFallbackDenied(address target, address invoker); +// 0x3995f750 +error AccessToFunctionDenied(address target, bytes4 selector, address invoker); +// 0x6c167909 +error OnlySelfAllowed(); +// 0x52e22c98 +error RestrictionWasNotPresent(address restriction); +// 0xf126e113 +error RestrictionWasAlreadyPresent(address restriction); +// 0x3331e9c0 +error CallNotAllowed(bytes call); +// 0x59e1b0d2 +error ChainZeroAddress(); +// 0xff4bbdf1 +error NotAHyperchain(address chainAddress); +// 0xa3decdf3 +error 
NotAnAdmin(address expected, address actual); +// 0xf6fd7071 +error RemovingPermanentRestriction(); +// 0xfcb9b2e1 +error UnallowedImplementation(bytes32 implementationHash); +// 0x1ff9d522 +error AddressAlreadyUsed(address addr); +// 0x0dfb42bf +error AddressAlreadySet(address addr); +// 0x86bb51b8 +error AddressHasNoCode(address); +// 0x1f73225f +error AddressMismatch(address expected, address supplied); +// 0x1eee5481 +error AddressTooLow(address); +// 0x5e85ae73 +error AmountMustBeGreaterThanZero(); +// 0xfde974f4 +error AssetHandlerDoesNotExist(bytes32 assetId); +// 0x1294e9e1 +error AssetIdMismatch(bytes32 expected, bytes32 supplied); +// 0xfe919e28 +error AssetIdAlreadyRegistered(); +// 0x0bfcef28 +error AlreadyWhitelisted(address); +// 0x04a0b7e9 +error AssetIdNotSupported(bytes32 assetId); +// 0x6afd6c20 +error BadReturnData(); +// 0x6ef9a972 +error BaseTokenGasPriceDenominatorNotSet(); +// 0x55ad3fd3 +error BatchHashMismatch(bytes32 expected, bytes32 actual); +// 0x2078a6a0 +error BatchNotExecuted(uint256 batchNumber); +// 0xbd4455ff +error BatchNumberMismatch(uint256 expectedBatchNumber, uint256 providedBatchNumber); +// 0xafd53e2f +error BlobHashCommitmentError(uint256 index, bool blobHashEmpty, bool blobCommitmentEmpty); +// 0x6cf12312 +error BridgeHubAlreadyRegistered(); +// 0xdb538614 +error BridgeMintNotImplemented(); +// 0xcf102c5a +error CalldataLengthTooBig(); +// 0xe85392f9 +error CanOnlyProcessOneBatch(); +// 0x00c6ead2 +error CantExecuteUnprovenBatches(); +// 0xe18cb383 +error CantRevertExecutedBatch(); +// 0x24591d89 +error ChainIdAlreadyExists(); +// 0x717a1656 +error ChainIdCantBeCurrentChain(); +// 0xa179f8c9 +error ChainIdMismatch(); +// 0x23f3c357 +error ChainIdNotRegistered(uint256 chainId); +// 0x5de72107 +error ChainNotLegacy(); +// 0x78d2ed02 +error ChainAlreadyLive(); +// 0x8f620a06 +error ChainIdTooBig(); +// 0xf7a01e4d +error DelegateCallFailed(bytes returnData); +// 0x0a8ed92c +error DenominatorIsZero(); +// 0xb4f54111 +error 
DeployFailed(); +// 0x138ee1a3 +error DeployingBridgedTokenForNativeToken(); +// 0xc7c9660f +error DepositDoesNotExist(); +// 0xad2fa98e +error DepositExists(); +// 0x79cacff1 +error DepositFailed(); +// 0x0e7ee319 +error DiamondAlreadyFrozen(); +// 0x682dabb4 +error DiamondFreezeIncorrectState(); +// 0xa7151b9a +error DiamondNotFrozen(); +// 0x7138356f +error EmptyAddress(); +// 0x2d4d012f +error EmptyAssetId(); +// 0xfc7ab1d3 +error EmptyBlobVersionHash(uint256 index); +// 0x1c25715b +error EmptyBytes32(); +// 0x95b66fe9 +error EmptyDeposit(); +// 0x627e0872 +error ETHDepositNotSupported(); +// +error FailedToTransferTokens(address tokenContract, address to, uint256 amount); +// 0xac4a3f98 +error FacetExists(bytes4 selector, address); +// 0x79e12cc3 +error FacetIsFrozen(bytes4 func); +/// +error FunctionNotSupported(); +// 0xc91cf3b1 +error GasPerPubdataMismatch(); +// 0x6d4a7df8 +error GenesisBatchCommitmentZero(); +// 0x7940c83f +error GenesisBatchHashZero(); +// 0xb4fc6835 +error GenesisIndexStorageZero(); +// 0x3a1a8589 +error GenesisUpgradeZero(); +// 0xd356e6ba +error HashedLogIsDefault(); +// 0x0b08d5be +error HashMismatch(bytes32 expected, bytes32 actual); +// 0xb615c2b1 +error ZKChainLimitReached(); +// +error InsufficientAllowance(uint256 providedAllowance, uint256 requiredAmount); +// 0xdd381a4c +error IncorrectBridgeHubAddress(address bridgehub); +// 0x826fb11e +error InsufficientChainBalance(); +// 0x356680b7 +error InsufficientFunds(); +// 0xcbd9d2e0 +error InvalidCaller(address); +// 0x7a47c9a2 +error InvalidChainId(); +// 0x4fbe5dba +error InvalidDelay(); +// 0x0af806e0 +error InvalidHash(); +// +error InvalidInput(); +// 0xc1780bd6 +error InvalidLogSender(address sender, uint256 logKey); +// 0xd8e9405c +error InvalidNumberOfBlobs(uint256 expected, uint256 numCommitments, uint256 numHashes); +// 0x09bde339 +error InvalidProof(); +// 0x5428eae7 +error InvalidProtocolVersion(); +// 0x53e6d04d +error InvalidPubdataCommitmentsSize(); +// 0x5513177c 
+error InvalidPubdataHash(bytes32 expectedHash, bytes32 provided); +// 0x9094af7e +error InvalidPubdataLength(); +// 0xc5d09071 +error InvalidPubdataMode(); +// 0x6f1cf752 +error InvalidPubdataPricingMode(); +// 0x12ba286f +error InvalidSelector(bytes4 func); +// 0x5cb29523 +error InvalidTxType(uint256 txType); +// 0x5f1aa154 +error InvalidUpgradeTxn(UpgradeTxVerifyParam); +// 0xaa7feadc +error InvalidValue(); +// 0x888b2f09 +error L1TokenDeploymentWithZeroChainId(bytes32 assetId); +// 0xa4f62e33 +error L2BridgeNotDeployed(uint256 chainId); +// 0xff8811ff +error L2BridgeNotSet(uint256 chainId); +// 0xcb5e4247 +error L2BytecodeHashMismatch(bytes32 expected, bytes32 provided); +// 0xfb5c22e6 +error L2TimestampTooBig(); +// 0xd2c011d6 +error L2UpgradeNonceNotEqualToNewProtocolVersion(uint256 nonce, uint256 protocolVersion); +// 0x97e1359e +error L2WithdrawalMessageWrongLength(uint256 messageLen); +// 0x32eb8b2f +error LegacyMethodIsSupportedOnlyForEra(); +// 0xe37d2c02 +error LengthIsNotDivisibleBy32(uint256 length); +// 0x1b6825bb +error LogAlreadyProcessed(uint8); +// 0x43e266b0 +error MalformedBytecode(BytecodeError); +// 0x59170bf0 +error MalformedCalldata(); +// 0x16509b9a +error MalformedMessage(); +// 0x9bb54c35 +error MerkleIndexOutOfBounds(); +// 0x8e23ac1a +error MerklePathEmpty(); +// 0x1c500385 +error MerklePathOutOfBounds(); +// 0x3312a450 +error MigrationPaused(); +// 0xfa44b527 +error MissingSystemLogs(uint256 expected, uint256 actual); +// 0x4a094431 +error MsgValueMismatch(uint256 expectedMsgValue, uint256 providedMsgValue); +// 0xb385a3da +error MsgValueTooLow(uint256 required, uint256 provided); +// 0x72ea85ad +error NewProtocolMajorVersionNotZero(); +// 0x79cc2d22 +error NoCallsProvided(); +// 0xa6fef710 +error NoFunctionsForDiamondCut(); +// 0xcab098d8 +error NoFundsTransferred(); +// 0x92290acc +error NonEmptyBlobVersionHash(uint256 index); +// 0xc21b1ab7 +error NonEmptyCalldata(); +// 0x536ec84b +error NonEmptyMsgValue(); +// 0xd018e08e +error 
NonIncreasingTimestamp(); +// 0x0105f9c0 +error NonSequentialBatch(); +// 0x0ac76f01 +error NonSequentialVersion(); +// 0x4ef79e5a +error NonZeroAddress(address); +// 0xdd629f86 +error NotEnoughGas(); +// 0xdd7e3621 +error NotInitializedReentrancyGuard(); +// 0xdf17e316 +error NotWhitelisted(address); +// 0xf3ed9dfa +error OnlyEraSupported(); +// 0x1a21feed +error OperationExists(); +// 0xeda2fbb1 +error OperationMustBePending(); +// 0xe1c1ff37 +error OperationMustBeReady(); +// 0xb926450e +error OriginChainIdNotFound(); +// 0xd7f50a9d +error PatchCantSetUpgradeTxn(); +// 0x962fd7d0 +error PatchUpgradeCantSetBootloader(); +// 0x559cc34e +error PatchUpgradeCantSetDefaultAccount(); +// 0x8d5851de +error PointEvalCallFailed(bytes); +// 0x4daa985d +error PointEvalFailed(bytes); +// 0x9b48e060 +error PreviousOperationNotExecuted(); +// 0x5c598b60 +error PreviousProtocolMajorVersionNotZero(); +// 0xa0f47245 +error PreviousUpgradeNotCleaned(); +// 0x101ba748 +error PreviousUpgradeNotFinalized(bytes32 txHash); +// 0xd5a99014 +error PriorityOperationsRollingHashMismatch(); +// 0x1a4d284a +error PriorityTxPubdataExceedsMaxPubDataPerBatch(); +// 0xa461f651 +error ProtocolIdMismatch(uint256 expectedProtocolVersion, uint256 providedProtocolId); +// 0x64f94ec2 +error ProtocolIdNotGreater(); +// 0xd328c12a +error ProtocolVersionMinorDeltaTooBig(uint256 limit, uint256 proposed); +// 0x88d7b498 +error ProtocolVersionTooSmall(); +// 0x53dee67b +error PubdataCommitmentsEmpty(); +// 0x7734c31a +error PubdataCommitmentsTooBig(); +// 0x959f26fb +error PubdataGreaterThanLimit(uint256 limit, uint256 length); +// 0x2a4a14df +error PubdataPerBatchIsLessThanTxn(); +// 0x63c36549 +error QueueIsEmpty(); +// 0xab143c06 +error Reentrancy(); +// 0x667d17de +error RemoveFunctionFacetAddressNotZero(address facet); +// 0xa2d4b16c +error RemoveFunctionFacetAddressZero(); +// 0x3580370c +error ReplaceFunctionFacetAddressZero(); +// 0xdab52f4b +error RevertedBatchBeforeNewBatch(); +// 0x9a67c1cb +error 
RevertedBatchNotAfterNewLastBatch(); +// 0xd3b6535b +error SelectorsMustAllHaveSameFreezability(); +// 0x7774d2f9 +error SharedBridgeValueNotSet(SharedBridgeKey); +// 0xc1d9246c +error SharedBridgeBalanceMismatch(); +// 0x856d5b77 +error SharedBridgeNotSet(); +// 0xcac5fc40 +error SharedBridgeValueAlreadySet(SharedBridgeKey); +// 0xdf3a8fdd +error SlotOccupied(); +// 0xd0bc70cf +error CTMAlreadyRegistered(); +// 0x09865e10 +error CTMNotRegistered(); +// 0xae43b424 +error SystemLogsSizeTooBig(); +// 0x08753982 +error TimeNotReached(uint256 expectedTimestamp, uint256 actualTimestamp); +// 0x2d50c33b +error TimestampError(); +// 0x4f4b634e +error TokenAlreadyRegistered(address token); +// 0xddef98d7 +error TokenNotRegistered(address token); +// 0x06439c6b +error TokenNotSupported(address token); +// 0x23830e28 +error TokensWithFeesNotSupported(); +// 0xf640f0e5 +error TooManyBlobs(); +// 0x76da24b9 +error TooManyFactoryDeps(); +// 0xf0b4e88f +error TooMuchGas(); +// 0x00c5a6a9 +error TransactionNotAllowed(); +// 0x4c991078 +error TxHashMismatch(); +// 0x2e311df8 +error TxnBodyGasLimitNotEnoughGas(); +// 0x8e4a23d6 +error Unauthorized(address caller); +// 0xe52478c7 +error UndefinedDiamondCutAction(); +// 0x07218375 +error UnexpectedNumberOfFactoryDeps(); +// 0x6aa39880 +error UnexpectedSystemLog(uint256 logKey); +// +error UnimplementedMessage(string); +// 0xf093c2e5 +error UpgradeBatchNumberIsNotZero(); +// 0x084a1449 +error UnsupportedEncodingVersion(); +// +error UnsupportedPaymasterFlow(); +// 0x47b3b145 +error ValidateTxnNotEnoughGas(); +// 0x626ade30 +error ValueMismatch(uint256 expected, uint256 actual); +// 0xe1022469 +error VerifiedBatchesExceedsCommittedBatches(); +// 0x2dbdba00 +error VerifyProofCommittedVerifiedMismatch(); +// 0xae899454 +error WithdrawalAlreadyFinalized(); +// 0x27fcd9d1 +error WithdrawalFailed(); +// 0x750b219c +error WithdrawFailed(); +// 0x15e8e429 +error WrongMagicValue(uint256 expectedMagicValue, uint256 providedMagicValue); +// 
0xd92e233d +error ZeroAddress(); +// 0x669567ea +error ZeroBalance(); +// 0xc84885d4 +error ZeroChainId(); +// 0x520aa59c +error PubdataIsEmpty(); +// 0x99d8fec9 +error EmptyData(); +// 0xc99a8360 +error UnsupportedCommitBatchEncoding(uint8 version); +// 0xe167e4a6 +error UnsupportedProofBatchEncoding(uint8 version); +// 0xe8e3f6f4 +error UnsupportedExecuteBatchEncoding(uint8 version); +// 0xd7d93e1f +error IncorrectBatchBounds( + uint256 processFromExpected, + uint256 processToExpected, + uint256 processFromProvided, + uint256 processToProvided +); +// 0x64107968 +error AssetHandlerNotRegistered(bytes32 assetId); +// 0x10f30e75 +error NotBridgehub(address addr); +// 0x2554babc +error InvalidAddress(address expected, address actual); +// 0xfa5cd00f +error NotAllowed(address addr); + +enum SharedBridgeKey { + PostUpgradeFirstBatch, + LegacyBridgeFirstBatch, + LegacyBridgeLastDepositBatch, + LegacyBridgeLastDepositTxn +} + +enum BytecodeError { + Version, + NumberOfWords, + Length, + WordsMustBeOdd +} + +enum UpgradeTxVerifyParam { + From, + To, + Paymaster, + Value, + MaxFeePerGas, + MaxPriorityFeePerGas, + Reserved0, + Reserved1, + Reserved2, + Reserved3, + Signature, + PaymasterInput, + ReservedDynamic +} diff --git a/l1-contracts/contracts/common/L2ContractAddresses.sol b/l1-contracts/contracts/common/L2ContractAddresses.sol index 0becefe72..a8fba013c 100644 --- a/l1-contracts/contracts/common/L2ContractAddresses.sol +++ b/l1-contracts/contracts/common/L2ContractAddresses.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; /// @dev The formal address of the initial program of the system: the bootloader address constant L2_BOOTLOADER_ADDRESS = address(0x8001); @@ -31,3 +31,55 @@ address constant L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR = address(0x800b); /// @dev The address of the pubdata chunk publisher contract address constant L2_PUBDATA_CHUNK_PUBLISHER_ADDR = address(0x8011); + +/// @dev The address used to execute complex upgrades, also used for the genesis upgrade +address constant L2_COMPLEX_UPGRADER_ADDR = address(0x800f); + +/// @dev The address used to execute the genesis upgrade +address constant L2_GENESIS_UPGRADE_ADDR = address(0x10001); + +/// @dev The address of the L2 bridge hub system contract, used to start L2<>L2 transactions +address constant L2_BRIDGEHUB_ADDR = address(0x10002); + +/// @dev the address of the l2 asset router. +address constant L2_ASSET_ROUTER_ADDR = address(0x10003); + +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice Smart contract for sending arbitrary length messages to L1 + * @dev by default ZKsync can send fixed-length messages on L1. + * A fixed length message has 4 parameters `senderAddress`, `isService`, `key`, `value`, + * the first one is taken from the context, the other three are chosen by the sender. + * @dev To send a variable-length message we use this trick: + * - This system contract accepts an arbitrary length message and sends a fixed length message with + * parameters `senderAddress == this`, `isService == true`, `key == msg.sender`, `value == keccak256(message)`. + * - The contract on L1 accepts all sent messages and if the message came from this system contract + * it requires that the preimage of `value` be provided. + */ +interface IL2Messenger { + /// @notice Sends an arbitrary length message to L1. + /// @param _message The variable length message to be sent to L1. + /// @return Returns the keccak256 hashed value of the message.
+ function sendToL1(bytes memory _message) external returns (bytes32); +} + +/// @dev An l2 system contract address, used in the assetId calculation for native assets. +/// This is needed for automatic bridging, i.e. without deploying the AssetHandler contract, +/// if the assetId can be calculated with this address then it is in fact an NTV asset +address constant L2_NATIVE_TOKEN_VAULT_ADDR = address(0x10004); + +/// @dev the address of the l2 asse3t router. +address constant L2_MESSAGE_ROOT_ADDR = address(0x10005); + +/// @dev the offset for the system contracts +uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15 + +/// @dev the address of the deployer system contract +address constant DEPLOYER_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x06); + +/// @dev the address of the l2 messenger system contract +IL2Messenger constant L2_MESSENGER = IL2Messenger(address(SYSTEM_CONTRACTS_OFFSET + 0x08)); + +/// @dev the address of the msg value system contract +address constant MSG_VALUE_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x09); diff --git a/l1-contracts/contracts/common/Messaging.sol b/l1-contracts/contracts/common/Messaging.sol index 3c934ae5f..a7a2db944 100644 --- a/l1-contracts/contracts/common/Messaging.sol +++ b/l1-contracts/contracts/common/Messaging.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /// @dev The enum that represents the transaction execution status /// @param Failure The transaction execution failed @@ -122,6 +122,7 @@ struct L2CanonicalTransaction { /// @param factoryDeps The array of L2 bytecodes that the tx depends on. /// @param refundRecipient The recipient of the refund for the transaction on L2. If the transaction fails, then /// this address will receive the `l2Value`. 
+// solhint-disable-next-line gas-struct-packing struct BridgehubL2TransactionRequest { address sender; address contractL2; diff --git a/l1-contracts/contracts/common/ReentrancyGuard.sol b/l1-contracts/contracts/common/ReentrancyGuard.sol index a19020b77..b1f8e556a 100644 --- a/l1-contracts/contracts/common/ReentrancyGuard.sol +++ b/l1-contracts/contracts/common/ReentrancyGuard.sol @@ -1,6 +1,8 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; +import {SlotOccupied, NotInitializedReentrancyGuard, Reentrancy} from "./L1ContractErrors.sol"; /** * @custom:security-contact security@matterlabs.dev @@ -55,7 +57,9 @@ abstract contract ReentrancyGuard { } // Check that storage slot for reentrancy guard is empty to rule out possibility of slot conflict - require(lockSlotOldValue == 0, "1B"); + if (lockSlotOldValue != 0) { + revert SlotOccupied(); + } } /** @@ -71,8 +75,13 @@ abstract contract ReentrancyGuard { _status := sload(LOCK_FLAG_ADDRESS) } - // On the first call to nonReentrant, _notEntered will be true - require(_status == _NOT_ENTERED, "r1"); + if (_status == 0) { + revert NotInitializedReentrancyGuard(); + } + // On the first call to nonReentrant, _NOT_ENTERED will be true + if (_status != _NOT_ENTERED) { + revert Reentrancy(); + } // Any calls to nonReentrant after this point will fail assembly { diff --git a/l1-contracts/contracts/common/interfaces/IL1Messenger.sol b/l1-contracts/contracts/common/interfaces/IL1Messenger.sol new file mode 100644 index 000000000..f0557487b --- /dev/null +++ b/l1-contracts/contracts/common/interfaces/IL1Messenger.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice The interface of the L1 Messenger contract, 
responsible for sending messages to L1. + */ +interface IL1Messenger { + function sendToL1(bytes memory _message) external returns (bytes32); +} diff --git a/l1-contracts/contracts/common/interfaces/IL2ContractDeployer.sol b/l1-contracts/contracts/common/interfaces/IL2ContractDeployer.sol index 31d796d45..015442dd9 100644 --- a/l1-contracts/contracts/common/interfaces/IL2ContractDeployer.sol +++ b/l1-contracts/contracts/common/interfaces/IL2ContractDeployer.sol @@ -1,10 +1,10 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /** * @author Matter Labs - * @notice System smart contract that is responsible for deploying other smart contracts on a zkSync hyperchain. + * @notice System smart contract that is responsible for deploying other smart contracts on a ZK chain. */ interface IL2ContractDeployer { /// @notice A struct that describes a forced deployment on an address. diff --git a/l1-contracts/contracts/common/libraries/DataEncoding.sol b/l1-contracts/contracts/common/libraries/DataEncoding.sol new file mode 100644 index 000000000..9df83d67a --- /dev/null +++ b/l1-contracts/contracts/common/libraries/DataEncoding.sol @@ -0,0 +1,152 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {L2_NATIVE_TOKEN_VAULT_ADDR} from "../L2ContractAddresses.sol"; +import {LEGACY_ENCODING_VERSION, NEW_ENCODING_VERSION} from "../../bridge/asset-router/IAssetRouterBase.sol"; +import {INativeTokenVault} from "../../bridge/ntv/INativeTokenVault.sol"; +import {UnsupportedEncodingVersion} from "../L1ContractErrors.sol"; + +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice Helper library for transfer data encoding and decoding to reduce possibility of errors. 
+ */ +library DataEncoding { + /// @notice Abi.encodes the data required for bridgeMint on remote chain. + /// @param _originalCaller The address which initiated the transfer. + /// @param _l2Receiver The address which to receive tokens on remote chain. + /// @param _l1Token The transferred token address. + /// @param _amount The amount of token to be transferred. + /// @param _erc20Metadata The transferred token metadata. + /// @return The encoded bridgeMint data + function encodeBridgeMintData( + address _originalCaller, + address _l2Receiver, + address _l1Token, + uint256 _amount, + bytes memory _erc20Metadata + ) internal pure returns (bytes memory) { + // solhint-disable-next-line func-named-parameters + return abi.encode(_originalCaller, _l2Receiver, _l1Token, _amount, _erc20Metadata); + } + + /// @notice Function decoding transfer data previously encoded with this library. + /// @param _bridgeMintData The encoded bridgeMint data + /// @return _originalCaller The address which initiated the transfer. + /// @return _l2Receiver The address which to receive tokens on remote chain. + /// @return _parsedL1Token The transferred token address. + /// @return _amount The amount of token to be transferred. + /// @return _erc20Metadata The transferred token metadata. + function decodeBridgeMintData( + bytes memory _bridgeMintData + ) + internal + pure + returns ( + address _originalCaller, + address _l2Receiver, + address _parsedL1Token, + uint256 _amount, + bytes memory _erc20Metadata + ) + { + (_originalCaller, _l2Receiver, _parsedL1Token, _amount, _erc20Metadata) = abi.decode( + _bridgeMintData, + (address, address, address, uint256, bytes) + ); + } + + /// @notice Encodes the asset data by combining chain id, asset deployment tracker and asset data. + /// @param _chainId The id of the chain token is native to. + /// @param _assetData The asset data that has to be encoded. + /// @param _sender The asset deployment tracker address. 
+ /// @return The encoded asset data. + function encodeAssetId(uint256 _chainId, bytes32 _assetData, address _sender) internal pure returns (bytes32) { + return keccak256(abi.encode(_chainId, _sender, _assetData)); + } + + /// @notice Encodes the asset data by combining chain id, asset deployment tracker and asset data. + /// @param _chainId The id of the chain token is native to. + /// @param _tokenAaddress The address of token that has to be encoded (asset data is the address itself). + /// @param _sender The asset deployment tracker address. + /// @return The encoded asset data. + function encodeAssetId(uint256 _chainId, address _tokenAaddress, address _sender) internal pure returns (bytes32) { + return keccak256(abi.encode(_chainId, _sender, _tokenAaddress)); + } + + /// @notice Encodes the asset data by combining chain id, NTV as asset deployment tracker and asset data. + /// @param _chainId The id of the chain token is native to. + /// @param _assetData The asset data that has to be encoded. + /// @return The encoded asset data. + function encodeNTVAssetId(uint256 _chainId, bytes32 _assetData) internal pure returns (bytes32) { + return keccak256(abi.encode(_chainId, L2_NATIVE_TOKEN_VAULT_ADDR, _assetData)); + } + + /// @notice Encodes the asset data by combining chain id, NTV as asset deployment tracker and asset data. + /// @param _chainId The id of the chain token is native to. + /// @param _tokenAddress The address of token that has to be encoded (asset data is the address itself). + /// @return The encoded asset data. + function encodeNTVAssetId(uint256 _chainId, address _tokenAddress) internal pure returns (bytes32) { + return keccak256(abi.encode(_chainId, L2_NATIVE_TOKEN_VAULT_ADDR, _tokenAddress)); + } + + /// @dev Encodes the transaction data hash using either the latest encoding standard or the legacy standard. + /// @param _encodingVersion EncodingVersion. + /// @param _originalCaller The address of the entity that initiated the deposit. 
+ /// @param _assetId The unique identifier of the deposited L1 token. + /// @param _nativeTokenVault The address of the token, only used if the encoding version is legacy. + /// @param _transferData The encoded transfer data, which includes both the deposit amount and the address of the L2 receiver. + /// @return txDataHash The resulting encoded transaction data hash. + function encodeTxDataHash( + bytes1 _encodingVersion, + address _originalCaller, + bytes32 _assetId, + address _nativeTokenVault, + bytes memory _transferData + ) internal view returns (bytes32 txDataHash) { + if (_encodingVersion == LEGACY_ENCODING_VERSION) { + address tokenAddress = INativeTokenVault(_nativeTokenVault).tokenAddress(_assetId); + (uint256 depositAmount, ) = abi.decode(_transferData, (uint256, address)); + txDataHash = keccak256(abi.encode(_originalCaller, tokenAddress, depositAmount)); + } else if (_encodingVersion == NEW_ENCODING_VERSION) { + // Similarly to calldata, the txDataHash is collision-resistant. + // In the legacy data hash, the first encoded variable was the address, which is padded with zeros during `abi.encode`. + txDataHash = keccak256( + bytes.concat(_encodingVersion, abi.encode(_originalCaller, _assetId, _transferData)) + ); + } else { + revert UnsupportedEncodingVersion(); + } + } + + /// @notice Decodes the token data by combining chain id, asset deployment tracker and asset data. 
+ function decodeTokenData( + bytes calldata _tokenData + ) internal pure returns (uint256 chainId, bytes memory name, bytes memory symbol, bytes memory decimals) { + bytes1 encodingVersion = _tokenData[0]; + // kl todo check correct + if (encodingVersion == LEGACY_ENCODING_VERSION) { + (name, symbol, decimals) = abi.decode(_tokenData, (bytes, bytes, bytes)); + } else if (encodingVersion == NEW_ENCODING_VERSION) { + return abi.decode(_tokenData[1:], (uint256, bytes, bytes, bytes)); + } else { + revert UnsupportedEncodingVersion(); + } + } + + /// @notice Encodes the token data by combining chain id, asset deployment tracker and asset data. + /// @param _chainId The id of the chain token is native to. + /// @param _name The name of the token. + /// @param _symbol The symbol of the token. + /// @param _decimals The decimals of the token. + /// @return The encoded token data. + function encodeTokenData( + uint256 _chainId, + bytes memory _name, + bytes memory _symbol, + bytes memory _decimals + ) internal pure returns (bytes memory) { + return bytes.concat(NEW_ENCODING_VERSION, abi.encode(_chainId, _name, _symbol, _decimals)); + } +} diff --git a/l1-contracts/contracts/common/libraries/DynamicIncrementalMerkle.sol b/l1-contracts/contracts/common/libraries/DynamicIncrementalMerkle.sol new file mode 100644 index 000000000..b41b665d3 --- /dev/null +++ b/l1-contracts/contracts/common/libraries/DynamicIncrementalMerkle.sol @@ -0,0 +1,147 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Merkle} from "./Merkle.sol"; +import {Arrays} from "@openzeppelin/contracts-v4/utils/Arrays.sol"; + +/** + * @dev Library for managing https://wikipedia.org/wiki/Merkle_Tree[Merkle Tree] data structures. + * + * Each tree is a complete binary tree with the ability to sequentially insert leaves, changing them from a zero to a + * non-zero value and updating its root. 
This structure allows inserting commitments (or other entries) that are not + * stored, but can be proven to be part of the tree at a later time if the root is kept. See {MerkleProof}. + * + * A tree is defined by the following parameters: + * + * * Depth: The number of levels in the tree, it also defines the maximum number of leaves as 2**depth. + * * Zero value: The value that represents an empty leaf. Used to avoid regular zero values to be part of the tree. + * * Hashing function: A cryptographic hash function used to produce internal nodes. + * + * This is a fork of OpenZeppelin's [`MerkleTree`](https://github.com/OpenZeppelin/openzeppelin-contracts/blob/9af280dc4b45ee5bda96ba47ff829b407eaab67e/contracts/utils/structs/MerkleTree.sol) + * library, with the changes to support dynamic tree growth (doubling the size when full). + */ +library DynamicIncrementalMerkle { + /** + * @dev A complete `bytes32` Merkle tree. + * + * The `sides` and `zero` arrays are set to have a length equal to the depth of the tree during setup. + * + * Struct members have an underscore prefix indicating that they are "private" and should not be read or written to + * directly. Use the functions provided below instead. Modifying the struct manually may violate assumptions and + * lead to unexpected behavior. + * + * NOTE: The `root` and the updates history is not stored within the tree. Consider using a secondary structure to + * store a list of historical roots from the values returned from {setup} and {push} (e.g. a mapping, {BitMaps} or + * {Checkpoints}). + * + * WARNING: Updating any of the tree's parameters after the first insertion will result in a corrupted tree. + */ + struct Bytes32PushTree { + uint256 _nextLeafIndex; + bytes32[] _sides; + bytes32[] _zeros; + } + + /** + * @dev Initialize a {Bytes32PushTree} using {Hashes-Keccak256} to hash internal nodes. + * The capacity of the tree (i.e. number of leaves) is set to `2**levels`. 
+ * + * IMPORTANT: The zero value should be carefully chosen since it will be stored in the tree representing + * empty leaves. It should be a value that is not expected to be part of the tree. + */ + function setup(Bytes32PushTree storage self, bytes32 zero) internal returns (bytes32 initialRoot) { + self._nextLeafIndex = 0; + self._zeros.push(zero); + self._sides.push(bytes32(0)); + return bytes32(0); + } + + /** + * @dev Resets the tree to a blank state. + * Calling this function on MerkleTree that was already setup and used will reset it to a blank state. + * @param zero The value that represents an empty leaf. + * @return initialRoot The initial root of the tree. + */ + function reset(Bytes32PushTree storage self, bytes32 zero) internal returns (bytes32 initialRoot) { + self._nextLeafIndex = 0; + uint256 length = self._zeros.length; + for (uint256 i = length; 0 < i; --i) { + self._zeros.pop(); + } + length = self._sides.length; + for (uint256 i = length; 0 < i; --i) { + self._sides.pop(); + } + self._zeros.push(zero); + self._sides.push(bytes32(0)); + return bytes32(0); + } + + /** + * @dev Insert a new leaf in the tree, and compute the new root. Returns the position of the inserted leaf in the + * tree, and the resulting root. + * + * Hashing the leaf before calling this function is recommended as a protection against + * second pre-image attacks. + */ + function push(Bytes32PushTree storage self, bytes32 leaf) internal returns (uint256 index, bytes32 newRoot) { + // Cache read + uint256 levels = self._zeros.length - 1; + + // Get leaf index + // solhint-disable-next-line gas-increment-by-one + index = self._nextLeafIndex++; + + // Check if tree is full. 
+ if (index == 1 << levels) { + bytes32 zero = self._zeros[levels]; + bytes32 newZero = Merkle.efficientHash(zero, zero); + self._zeros.push(newZero); + self._sides.push(bytes32(0)); + ++levels; + } + + // Rebuild branch from leaf to root + uint256 currentIndex = index; + bytes32 currentLevelHash = leaf; + bool updatedSides = false; + for (uint32 i = 0; i < levels; ++i) { + // Reaching the parent node, is currentLevelHash the left child? + bool isLeft = currentIndex % 2 == 0; + + // If so, next time we will come from the right, so we need to save it + if (isLeft && !updatedSides) { + Arrays.unsafeAccess(self._sides, i).value = currentLevelHash; + updatedSides = true; + } + + // Compute the current node hash by using the hash function + // with either its sibling (side) or the zero value for that level. + currentLevelHash = Merkle.efficientHash( + isLeft ? currentLevelHash : Arrays.unsafeAccess(self._sides, i).value, + isLeft ? Arrays.unsafeAccess(self._zeros, i).value : currentLevelHash + ); + + // Update node index + currentIndex >>= 1; + } + + Arrays.unsafeAccess(self._sides, levels).value = currentLevelHash; + return (index, currentLevelHash); + } + + /** + * @dev Tree's root. + */ + function root(Bytes32PushTree storage self) internal view returns (bytes32) { + return Arrays.unsafeAccess(self._sides, self._sides.length - 1).value; + } + + /** + * @dev Tree's height (does not include the root node). 
+ */ + function height(Bytes32PushTree storage self) internal view returns (uint256) { + return self._sides.length - 1; + } +} diff --git a/l1-contracts/contracts/common/libraries/FullMerkle.sol b/l1-contracts/contracts/common/libraries/FullMerkle.sol new file mode 100644 index 000000000..d39ccde48 --- /dev/null +++ b/l1-contracts/contracts/common/libraries/FullMerkle.sol @@ -0,0 +1,146 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable reason-string, gas-custom-errors + +import {UncheckedMath} from "../../common/libraries/UncheckedMath.sol"; +import {Merkle} from "./Merkle.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +library FullMerkle { + using UncheckedMath for uint256; + + struct FullTree { + uint256 _height; + uint256 _leafNumber; + bytes32[][] _nodes; + bytes32[] _zeros; + } + + /** + * @dev Initialize a {Bytes32PushTree} using {Merkle.efficientHash} to hash internal nodes. + * The capacity of the tree (i.e. number of leaves) is set to `2**levels`. + * + * Calling this function on MerkleTree that was already setup and used will reset it to a blank state. + * + * IMPORTANT: The zero value should be carefully chosen since it will be stored in the tree representing + * empty leaves. It should be a value that is not expected to be part of the tree. + * @param zero The zero value to be used in the tree. + */ + function setup(FullTree storage self, bytes32 zero) internal returns (bytes32 initialRoot) { + // Store depth in the dynamic array + self._zeros.push(zero); + self._nodes.push([zero]); + + return zero; + } + + /** + * @dev Push a new leaf to the tree. + * @param _leaf The leaf to be added to the tree. 
+ */ + function pushNewLeaf(FullTree storage self, bytes32 _leaf) internal returns (bytes32 newRoot) { + // solhint-disable-next-line gas-increment-by-one + uint256 index = self._leafNumber++; + + if (index == 1 << self._height) { + uint256 newHeight = self._height.uncheckedInc(); + self._height = newHeight; + bytes32 topZero = self._zeros[newHeight - 1]; + bytes32 newZero = Merkle.efficientHash(topZero, topZero); + self._zeros.push(newZero); + self._nodes.push([newZero]); + } + if (index != 0) { + uint256 oldMaxNodeNumber = index - 1; + uint256 maxNodeNumber = index; + for (uint256 i; i < self._height; i = i.uncheckedInc()) { + if (oldMaxNodeNumber == maxNodeNumber) { + break; + } + self._nodes[i].push(self._zeros[i]); + maxNodeNumber /= 2; + oldMaxNodeNumber /= 2; + } + } + return updateLeaf(self, index, _leaf); + } + + /** + * @dev Update a leaf at index in the tree. + * @param _index The index of the leaf to be updated. + * @param _itemHash The new hash of the leaf. + */ + function updateLeaf(FullTree storage self, uint256 _index, bytes32 _itemHash) internal returns (bytes32) { + // solhint-disable-next-line gas-custom-errors + uint256 maxNodeNumber = self._leafNumber - 1; + require(_index <= maxNodeNumber, "FMT, wrong index"); + self._nodes[0][_index] = _itemHash; + bytes32 currentHash = _itemHash; + for (uint256 i; i < self._height; i = i.uncheckedInc()) { + if (_index % 2 == 0) { + currentHash = Merkle.efficientHash( + currentHash, + maxNodeNumber == _index ? self._zeros[i] : self._nodes[i][_index + 1] + ); + } else { + currentHash = Merkle.efficientHash(self._nodes[i][_index - 1], currentHash); + } + _index /= 2; + maxNodeNumber /= 2; + self._nodes[i + 1][_index] = currentHash; + } + return currentHash; + } + + /** + * @dev Updated all leaves in the tree. + * @param _newLeaves The new leaves to be added to the tree. 
+ */ + function updateAllLeaves(FullTree storage self, bytes32[] memory _newLeaves) internal returns (bytes32) { + // solhint-disable-next-line gas-custom-errors + require(_newLeaves.length == self._leafNumber, "FMT, wrong length"); + return updateAllNodesAtHeight(self, 0, _newLeaves); + } + + /** + * @dev Update all nodes at a certain height in the tree. + * @param _height The height of the nodes to be updated. + * @param _newNodes The new nodes to be added to the tree. + */ + function updateAllNodesAtHeight( + FullTree storage self, + uint256 _height, + bytes32[] memory _newNodes + ) internal returns (bytes32) { + if (_height == self._height) { + self._nodes[_height][0] = _newNodes[0]; + return _newNodes[0]; + } + + uint256 newRowLength = (_newNodes.length + 1) / 2; + bytes32[] memory _newRow = new bytes32[](newRowLength); + + uint256 length = _newNodes.length; + for (uint256 i; i < length; i = i.uncheckedAdd(2)) { + self._nodes[_height][i] = _newNodes[i]; + if (i + 1 < length) { + self._nodes[_height][i + 1] = _newNodes[i + 1]; + _newRow[i / 2] = Merkle.efficientHash(_newNodes[i], _newNodes[i + 1]); + } else { + // Handle odd number of nodes by hashing the last node with zero + _newRow[i / 2] = Merkle.efficientHash(_newNodes[i], self._zeros[_height]); + } + } + return updateAllNodesAtHeight(self, _height + 1, _newRow); + } + + /** + * @dev Returns the root of the tree. 
+ */ + function root(FullTree storage self) internal view returns (bytes32) { + return self._nodes[self._height][0]; + } +} diff --git a/l1-contracts/contracts/common/libraries/L2ContractHelper.sol b/l1-contracts/contracts/common/libraries/L2ContractHelper.sol index ae1a64250..2d1a26c1f 100644 --- a/l1-contracts/contracts/common/libraries/L2ContractHelper.sol +++ b/l1-contracts/contracts/common/libraries/L2ContractHelper.sol @@ -1,6 +1,42 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; +import {BytecodeError, MalformedBytecode, LengthIsNotDivisibleBy32} from "../L1ContractErrors.sol"; + +import {UncheckedMath} from "./UncheckedMath.sol"; +import {L2_MESSENGER} from "../L2ContractAddresses.sol"; + +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice Interface for the contract that is used to deploy contracts on L2. + */ +interface IContractDeployer { + /// @notice A struct that describes a forced deployment on an address. + /// @param bytecodeHash The bytecode hash to put on an address. + /// @param newAddress The address on which to deploy the bytecodehash to. + /// @param callConstructor Whether to run the constructor on the force deployment. + /// @param value The `msg.value` with which to initialize a contract. + /// @param input The constructor calldata. + struct ForceDeployment { + bytes32 bytecodeHash; + address newAddress; + bool callConstructor; + uint256 value; + bytes input; + } + + /// @notice This method is to be used only during an upgrade to set bytecodes on specific addresses. + /// @param _deployParams A set of parameters describing force deployment. 
+ function forceDeployOnAddresses(ForceDeployment[] calldata _deployParams) external payable; + + /// @notice Creates a new contract at a determined address using the `CREATE2` salt on L2 + /// @param _salt a unique value to create the deterministic address of the new contract + /// @param _bytecodeHash the bytecodehash of the new contract to be deployed + /// @param _input the calldata to be sent to the constructor of the new contract + function create2(bytes32 _salt, bytes32 _bytecodeHash, bytes calldata _input) external returns (address); +} /** * @author Matter Labs @@ -8,9 +44,18 @@ pragma solidity 0.8.24; * @notice Helper library for working with L2 contracts on L1. */ library L2ContractHelper { + using UncheckedMath for uint256; + /// @dev The prefix used to create CREATE2 addresses. bytes32 private constant CREATE2_PREFIX = keccak256("zksyncCreate2"); + /// @notice Sends L2 -> L1 arbitrary-long message through the system contract messenger. + /// @param _message Data to be sent to L1. + /// @return keccak256 hash of the sent message. + function sendMessageToL1(bytes memory _message) internal returns (bytes32) { + return L2_MESSENGER.sendToL1(_message); + } + /// @notice Validate the bytecode format and calculate its hash. /// @param _bytecode The bytecode to hash. /// @return hashedBytecode The 32-byte hash of the bytecode. @@ -20,11 +65,19 @@ library L2ContractHelper { /// - Bytecode words length is not odd function hashL2Bytecode(bytes memory _bytecode) internal pure returns (bytes32 hashedBytecode) { // Note that the length of the bytecode must be provided in 32-byte words. 
- require(_bytecode.length % 32 == 0, "pq"); + if (_bytecode.length % 32 != 0) { + revert LengthIsNotDivisibleBy32(_bytecode.length); + } uint256 bytecodeLenInWords = _bytecode.length / 32; - require(bytecodeLenInWords < 2 ** 16, "pp"); // bytecode length must be less than 2^16 words - require(bytecodeLenInWords % 2 == 1, "ps"); // bytecode length in words must be odd + // bytecode length must be less than 2^16 words + if (bytecodeLenInWords >= 2 ** 16) { + revert MalformedBytecode(BytecodeError.NumberOfWords); + } + // bytecode length in words must be odd + if (bytecodeLenInWords % 2 == 0) { + revert MalformedBytecode(BytecodeError.WordsMustBeOdd); + } hashedBytecode = sha256(_bytecode) & 0x00000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF; // Setting the version of the hash hashedBytecode = (hashedBytecode | bytes32(uint256(1 << 248))); @@ -38,9 +91,15 @@ library L2ContractHelper { /// @param _bytecodeHash The hash of the bytecode to validate. function validateBytecodeHash(bytes32 _bytecodeHash) internal pure { uint8 version = uint8(_bytecodeHash[0]); - require(version == 1 && _bytecodeHash[1] == bytes1(0), "zf"); // Incorrectly formatted bytecodeHash + // Incorrectly formatted bytecodeHash + if (version != 1 || _bytecodeHash[1] != bytes1(0)) { + revert MalformedBytecode(BytecodeError.Version); + } - require(bytecodeLen(_bytecodeHash) % 2 == 1, "uy"); // Code length in words must be odd + // Code length in words must be odd + if (bytecodeLen(_bytecodeHash) % 2 == 0) { + revert MalformedBytecode(BytecodeError.WordsMustBeOdd); + } } /// @notice Returns the length of the bytecode associated with the given hash. 
@@ -71,4 +130,18 @@ library L2ContractHelper { return address(uint160(uint256(data))); } + + /// @notice Hashes the L2 bytecodes and returns them in the format in which they are processed by the bootloader + function hashFactoryDeps(bytes[] memory _factoryDeps) internal pure returns (uint256[] memory hashedFactoryDeps) { + uint256 factoryDepsLen = _factoryDeps.length; + hashedFactoryDeps = new uint256[](factoryDepsLen); + for (uint256 i = 0; i < factoryDepsLen; i = i.uncheckedInc()) { + bytes32 hashedBytecode = hashL2Bytecode(_factoryDeps[i]); + + // Store the resulting hash sequentially in bytes. + assembly { + mstore(add(hashedFactoryDeps, mul(add(i, 1), 32)), hashedBytecode) + } + } + } } diff --git a/l1-contracts/contracts/common/libraries/Merkle.sol b/l1-contracts/contracts/common/libraries/Merkle.sol new file mode 100644 index 000000000..66db8ea75 --- /dev/null +++ b/l1-contracts/contracts/common/libraries/Merkle.sol @@ -0,0 +1,131 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; + +// solhint-disable gas-custom-errors + +import {UncheckedMath} from "../../common/libraries/UncheckedMath.sol"; +import {MerklePathEmpty, MerklePathOutOfBounds, MerkleIndexOutOfBounds} from "../../common/L1ContractErrors.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +library Merkle { + using UncheckedMath for uint256; + + /// @dev Calculate Merkle root by the provided Merkle proof. 
+ /// NOTE: When using this function, check that the _path length is equal to the tree height to prevent shorter/longer paths attack + /// @param _path Merkle path from the leaf to the root + /// @param _index Leaf index in the tree + /// @param _itemHash Hash of leaf content + /// @return The Merkle root + function calculateRoot( + bytes32[] calldata _path, + uint256 _index, + bytes32 _itemHash + ) internal pure returns (bytes32) { + uint256 pathLength = _path.length; + _validatePathLengthForSingleProof(_index, pathLength); + + bytes32 currentHash = _itemHash; + for (uint256 i; i < pathLength; i = i.uncheckedInc()) { + currentHash = (_index % 2 == 0) + ? efficientHash(currentHash, _path[i]) + : efficientHash(_path[i], currentHash); + _index /= 2; + } + + return currentHash; + } + + /// @dev Calculate Merkle root by the provided Merkle proof. + /// NOTE: When using this function, check that the _path length is equal to the tree height to prevent shorter/longer paths attack + /// @param _path Merkle path from the leaf to the root + /// @param _index Leaf index in the tree + /// @param _itemHash Hash of leaf content + /// @return The Merkle root + function calculateRootMemory( + bytes32[] memory _path, + uint256 _index, + bytes32 _itemHash + ) internal pure returns (bytes32) { + uint256 pathLength = _path.length; + _validatePathLengthForSingleProof(_index, pathLength); + + bytes32 currentHash = _itemHash; + for (uint256 i; i < pathLength; i = i.uncheckedInc()) { + currentHash = (_index % 2 == 0) + ? 
efficientHash(currentHash, _path[i]) + : efficientHash(_path[i], currentHash); + _index /= 2; + } + + return currentHash; + } + + /// @dev Calculate Merkle root by the provided Merkle proof for a range of elements + /// NOTE: When using this function, check that the _startPath and _endPath lengths are equal to the tree height to prevent shorter/longer paths attack + /// @param _startPath Merkle path from the first element of the range to the root + /// @param _endPath Merkle path from the last element of the range to the root + /// @param _startIndex Index of the first element of the range in the tree + /// @param _itemHashes Hashes of the elements in the range + /// @return The Merkle root + function calculateRootPaths( + bytes32[] memory _startPath, + bytes32[] memory _endPath, + uint256 _startIndex, + bytes32[] memory _itemHashes + ) internal pure returns (bytes32) { + uint256 pathLength = _startPath.length; + require(pathLength == _endPath.length, "Merkle: path length mismatch"); + if (pathLength >= 256) { + revert MerklePathOutOfBounds(); + } + uint256 levelLen = _itemHashes.length; + // Edge case: we want to be able to prove an element in a single-node tree. + if (pathLength == 0 && (_startIndex != 0 || levelLen != 1)) { + revert MerklePathEmpty(); + } + require(levelLen > 0, "Merkle: nothing to prove"); + require(_startIndex + levelLen <= (1 << pathLength), "Merkle: index/height mismatch"); + bytes32[] memory itemHashes = _itemHashes; + + for (uint256 level; level < pathLength; level = level.uncheckedInc()) { + uint256 parity = _startIndex % 2; + // We get an extra element on the next level if on the current level elements either + // start on an odd index (`parity == 1`) or end on an even index (`levelLen % 2 == 1`) + uint256 nextLevelLen = levelLen / 2 + (parity | (levelLen % 2)); + for (uint256 i; i < nextLevelLen; i = i.uncheckedInc()) { + bytes32 lhs = (i == 0 && parity == 1) ? 
_startPath[level] : itemHashes[2 * i - parity]; + bytes32 rhs = (i == nextLevelLen - 1 && (levelLen - parity) % 2 == 1) + ? _endPath[level] + : itemHashes[2 * i + 1 - parity]; + itemHashes[i] = efficientHash(lhs, rhs); + } + levelLen = nextLevelLen; + _startIndex /= 2; + } + + return itemHashes[0]; + } + + /// @dev Keccak hash of the concatenation of two 32-byte words + function efficientHash(bytes32 _lhs, bytes32 _rhs) internal pure returns (bytes32 result) { + assembly { + mstore(0x00, _lhs) + mstore(0x20, _rhs) + result := keccak256(0x00, 0x40) + } + } + + function _validatePathLengthForSingleProof(uint256 _index, uint256 _pathLength) private pure { + if (_pathLength == 0) { + revert MerklePathEmpty(); + } + if (_pathLength >= 256) { + revert MerklePathOutOfBounds(); + } + if (_index >= (1 << _pathLength)) { + revert MerkleIndexOutOfBounds(); + } + } +} diff --git a/l1-contracts/contracts/common/libraries/MessageHashing.sol b/l1-contracts/contracts/common/libraries/MessageHashing.sol new file mode 100644 index 000000000..b7009482d --- /dev/null +++ b/l1-contracts/contracts/common/libraries/MessageHashing.sol @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +bytes32 constant BATCH_LEAF_PADDING = keccak256("zkSync:BatchLeaf"); +bytes32 constant CHAIN_ID_LEAF_PADDING = keccak256("zkSync:ChainIdLeaf"); + +library MessageHashing { + /// @dev Returns the leaf hash for a chain with batch number and batch root. + /// @param batchRoot The root hash of the batch. + /// @param batchNumber The number of the batch. + function batchLeafHash(bytes32 batchRoot, uint256 batchNumber) internal pure returns (bytes32) { + return keccak256(abi.encodePacked(BATCH_LEAF_PADDING, batchRoot, batchNumber)); + } + + /// @dev Returns the leaf hash for a chain with chain root and chain id. + /// @param chainIdRoot The root hash of the chain. + /// @param chainId The id of the chain. 
+ function chainIdLeafHash(bytes32 chainIdRoot, uint256 chainId) internal pure returns (bytes32) { + return keccak256(abi.encodePacked(CHAIN_ID_LEAF_PADDING, chainIdRoot, chainId)); + } +} diff --git a/l1-contracts/contracts/common/libraries/SemVer.sol b/l1-contracts/contracts/common/libraries/SemVer.sol index d20f6a1d1..c46051626 100644 --- a/l1-contracts/contracts/common/libraries/SemVer.sol +++ b/l1-contracts/contracts/common/libraries/SemVer.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /// @dev The number of bits dedicated to the "patch" portion of the protocol version. /// This also defines the bit starting from which the "minor" part is located. diff --git a/l1-contracts/contracts/common/libraries/SystemContractsCaller.sol b/l1-contracts/contracts/common/libraries/SystemContractsCaller.sol new file mode 100644 index 000000000..b6bf0c54a --- /dev/null +++ b/l1-contracts/contracts/common/libraries/SystemContractsCaller.sol @@ -0,0 +1,145 @@ +// SPDX-License-Identifier: MIT + +// solhint-disable one-contract-per-file + +pragma solidity 0.8.24; + +import {MSG_VALUE_SYSTEM_CONTRACT} from "../L2ContractAddresses.sol"; + +address constant SYSTEM_CALL_CALL_ADDRESS = address((1 << 16) - 11); +/// @dev If the bitwise AND of the extraAbi[2] param when calling the MSG_VALUE_SIMULATOR +/// is non-zero, the call will be assumed to be a system one. +uint256 constant MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT = 1; + +/// @notice The way to forward the calldata: +/// - Use the current heap (i.e. the same as on EVM). +/// - Use the auxiliary heap. 
+/// - Forward via a pointer +/// @dev Note, that currently, users do not have access to the auxiliary +/// heap and so the only type of forwarding that will be used by the users +/// are UseHeap and ForwardFatPointer for forwarding a slice of the current calldata +/// to the next call. +enum CalldataForwardingMode { + UseHeap, + ForwardFatPointer, + UseAuxHeap +} + +/// @notice Error thrown a cast from uint256 to u32 is not possible. +error U32CastOverflow(); + +library Utils { + function safeCastToU32(uint256 _x) internal pure returns (uint32) { + if (_x > type(uint32).max) { + revert U32CastOverflow(); + } + + return uint32(_x); + } +} + +/// @notice The library contains the functions to make system calls. +/// @dev A more detailed description of the library and its methods can be found in the `system-contracts` repo. +library SystemContractsCaller { + function systemCall(uint32 gasLimit, address to, uint256 value, bytes memory data) internal returns (bool success) { + address callAddr = SYSTEM_CALL_CALL_ADDRESS; + + uint32 dataStart; + assembly { + dataStart := add(data, 0x20) + } + uint32 dataLength = uint32(Utils.safeCastToU32(data.length)); + + uint256 farCallAbi = getFarCallABI({ + dataOffset: 0, + memoryPage: 0, + dataStart: dataStart, + dataLength: dataLength, + gasPassed: gasLimit, + // Only rollup is supported for now + shardId: 0, + forwardingMode: CalldataForwardingMode.UseHeap, + isConstructorCall: false, + isSystemCall: true + }); + + if (value == 0) { + // Doing the system call directly + assembly { + success := call(to, callAddr, 0, 0, farCallAbi, 0, 0) + } + } else { + address msgValueSimulator = MSG_VALUE_SYSTEM_CONTRACT; + // We need to supply the mask to the MsgValueSimulator to denote + // that the call should be a system one. 
+ uint256 forwardMask = MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT; + + assembly { + success := call(msgValueSimulator, callAddr, value, to, farCallAbi, forwardMask, 0) + } + } + } + + function systemCallWithReturndata( + uint32 gasLimit, + address to, + uint128 value, + bytes memory data + ) internal returns (bool success, bytes memory returnData) { + success = systemCall(gasLimit, to, value, data); + + uint256 size; + assembly { + size := returndatasize() + } + + returnData = new bytes(size); + assembly { + returndatacopy(add(returnData, 0x20), 0, size) + } + } + + function getFarCallABI( + uint32 dataOffset, + uint32 memoryPage, + uint32 dataStart, + uint32 dataLength, + uint32 gasPassed, + uint8 shardId, + CalldataForwardingMode forwardingMode, + bool isConstructorCall, + bool isSystemCall + ) internal pure returns (uint256 farCallAbi) { + // Fill in the call parameter fields + farCallAbi = getFarCallABIWithEmptyFatPointer({ + gasPassed: gasPassed, + shardId: shardId, + forwardingMode: forwardingMode, + isConstructorCall: isConstructorCall, + isSystemCall: isSystemCall + }); + // Fill in the fat pointer fields + farCallAbi |= dataOffset; + farCallAbi |= (uint256(memoryPage) << 32); + farCallAbi |= (uint256(dataStart) << 64); + farCallAbi |= (uint256(dataLength) << 96); + } + + function getFarCallABIWithEmptyFatPointer( + uint32 gasPassed, + uint8 shardId, + CalldataForwardingMode forwardingMode, + bool isConstructorCall, + bool isSystemCall + ) internal pure returns (uint256 farCallAbiWithEmptyFatPtr) { + farCallAbiWithEmptyFatPtr |= (uint256(gasPassed) << 192); + farCallAbiWithEmptyFatPtr |= (uint256(forwardingMode) << 224); + farCallAbiWithEmptyFatPtr |= (uint256(shardId) << 232); + if (isConstructorCall) { + farCallAbiWithEmptyFatPtr |= (1 << 240); + } + if (isSystemCall) { + farCallAbiWithEmptyFatPtr |= (1 << 248); + } + } +} diff --git a/l1-contracts/contracts/common/libraries/UncheckedMath.sol b/l1-contracts/contracts/common/libraries/UncheckedMath.sol index 
6adfabf17..a41a9c6ea 100644 --- a/l1-contracts/contracts/common/libraries/UncheckedMath.sol +++ b/l1-contracts/contracts/common/libraries/UncheckedMath.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /** * @author Matter Labs diff --git a/l1-contracts/contracts/common/libraries/UnsafeBytes.sol b/l1-contracts/contracts/common/libraries/UnsafeBytes.sol index 5f0647489..4edf94004 100644 --- a/l1-contracts/contracts/common/libraries/UnsafeBytes.sol +++ b/l1-contracts/contracts/common/libraries/UnsafeBytes.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /** * @author Matter Labs @@ -30,6 +30,13 @@ library UnsafeBytes { } } + function readUint128(bytes memory _bytes, uint256 _start) internal pure returns (uint128 result, uint256 offset) { + assembly { + offset := add(_start, 16) + result := mload(add(_bytes, offset)) + } + } + function readUint256(bytes memory _bytes, uint256 _start) internal pure returns (uint256 result, uint256 offset) { assembly { offset := add(_start, 32) @@ -43,4 +50,13 @@ library UnsafeBytes { result := mload(add(_bytes, offset)) } } + + function readRemainingBytes(bytes memory _bytes, uint256 _start) internal pure returns (bytes memory result) { + uint256 arrayLen = _bytes.length - _start; + result = new bytes(arrayLen); + + assembly { + mcopy(add(result, 0x20), add(_bytes, add(0x20, _start)), arrayLen) + } + } } diff --git a/l1-contracts/contracts/dev-contracts/DummyL1ERC20Bridge.sol b/l1-contracts/contracts/dev-contracts/DummyL1ERC20Bridge.sol index 8155ddf6b..5ca21d4ba 100644 --- 
a/l1-contracts/contracts/dev-contracts/DummyL1ERC20Bridge.sol +++ b/l1-contracts/contracts/dev-contracts/DummyL1ERC20Bridge.sol @@ -3,14 +3,24 @@ pragma solidity 0.8.24; import {L1ERC20Bridge} from "../bridge/L1ERC20Bridge.sol"; -import {IL1SharedBridge} from "../bridge/interfaces/IL1SharedBridge.sol"; +import {IL1AssetRouter} from "../bridge/asset-router/IL1AssetRouter.sol"; +import {IL1NativeTokenVault} from "../bridge/ntv/IL1NativeTokenVault.sol"; +import {IL1Nullifier} from "../bridge/interfaces/IL1Nullifier.sol"; contract DummyL1ERC20Bridge is L1ERC20Bridge { - constructor(IL1SharedBridge _l1SharedBridge) L1ERC20Bridge(_l1SharedBridge) {} + constructor( + IL1Nullifier _l1Nullifier, + IL1AssetRouter _l1SharedBridge, + IL1NativeTokenVault _l1NativeTokenVault, + uint256 _eraChainId + ) L1ERC20Bridge(_l1Nullifier, _l1SharedBridge, _l1NativeTokenVault, _eraChainId) {} - function setValues(address _l2Bridge, address _l2TokenBeacon, bytes32 _l2TokenProxyBytecodeHash) external { - l2Bridge = _l2Bridge; + function setValues(address _l2SharedBridge, address _l2TokenBeacon, bytes32 _l2TokenProxyBytecodeHash) external { + l2Bridge = _l2SharedBridge; l2TokenBeacon = _l2TokenBeacon; l2TokenProxyBytecodeHash = _l2TokenProxyBytecodeHash; } + + // add this to be excluded from coverage report + function test() internal virtual {} } diff --git a/l1-contracts/contracts/dev-contracts/L1NullifierDev.sol b/l1-contracts/contracts/dev-contracts/L1NullifierDev.sol new file mode 100644 index 000000000..062d168cd --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/L1NullifierDev.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {L1Nullifier, IBridgehub} from "../bridge/L1Nullifier.sol"; + +contract L1NullifierDev is L1Nullifier { + constructor( + IBridgehub _bridgehub, + uint256 _eraChainId, + address _eraDiamondProxy + ) L1Nullifier(_bridgehub, _eraChainId, _eraDiamondProxy) {} + + function setL2LegacySharedBridge(uint256 _chainId, address 
_l2Bridge) external { + __DEPRECATED_l2BridgeAddress[_chainId] = _l2Bridge; + } + + // add this to be excluded from coverage report + function test() internal virtual {} +} diff --git a/l1-contracts/contracts/dev-contracts/Multicall.sol b/l1-contracts/contracts/dev-contracts/Multicall.sol index e2b1391dd..242c01e24 100644 --- a/l1-contracts/contracts/dev-contracts/Multicall.sol +++ b/l1-contracts/contracts/dev-contracts/Multicall.sol @@ -33,7 +33,8 @@ contract Multicall { function aggregate(Call[] memory calls) public returns (uint256 blockNumber, bytes[] memory returnData) { blockNumber = block.number; returnData = new bytes[](calls.length); - for (uint256 i = 0; i < calls.length; ++i) { + uint256 callsLength = calls.length; + for (uint256 i = 0; i < callsLength; ++i) { (bool success, bytes memory ret) = calls[i].target.call(calls[i].callData); require(success, "multicall 1"); returnData[i] = ret; diff --git a/l1-contracts/contracts/dev-contracts/RevertReceiveAccount.sol b/l1-contracts/contracts/dev-contracts/RevertReceiveAccount.sol index 31575de1b..663afdfdc 100644 --- a/l1-contracts/contracts/dev-contracts/RevertReceiveAccount.sol +++ b/l1-contracts/contracts/dev-contracts/RevertReceiveAccount.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; /// @title RevertReceiveAccount - An account which reverts receiving funds depending on the flag -/// @dev Used for testing failed withdrawals from the zkSync smart contract +/// @dev Used for testing failed withdrawals from the ZKsync smart contract contract RevertReceiveAccount { // add this to be excluded from coverage report function test() internal virtual {} diff --git a/l1-contracts/contracts/dev-contracts/RevertTransferERC20.sol b/l1-contracts/contracts/dev-contracts/RevertTransferERC20.sol index bd018276d..dcc1f71f7 100644 --- a/l1-contracts/contracts/dev-contracts/RevertTransferERC20.sol +++ b/l1-contracts/contracts/dev-contracts/RevertTransferERC20.sol @@ -5,7 +5,7 @@ pragma solidity 0.8.24; import {TestnetERC20Token} 
from "./TestnetERC20Token.sol"; /// @title RevertTransferERC20Token - A ERC20 token contract which can revert transfers depending on a flag -/// @dev Used for testing failed ERC-20 withdrawals from the zkSync smart contract +/// @dev Used for testing failed ERC-20 withdrawals from the ZKsync smart contract contract RevertTransferERC20 is TestnetERC20Token { // add this to be excluded from coverage report function test() internal override {} diff --git a/l1-contracts/contracts/dev-contracts/SingletonFactory.sol b/l1-contracts/contracts/dev-contracts/SingletonFactory.sol index b1bd999cf..3e8b9eec9 100644 --- a/l1-contracts/contracts/dev-contracts/SingletonFactory.sol +++ b/l1-contracts/contracts/dev-contracts/SingletonFactory.sol @@ -1,3 +1,4 @@ +// SPDX-License-Identifier: MIT pragma solidity 0.8.24; /** diff --git a/l1-contracts/contracts/dev-contracts/TestnetERC20Token.sol b/l1-contracts/contracts/dev-contracts/TestnetERC20Token.sol index 4eaddf921..2b189d1c9 100644 --- a/l1-contracts/contracts/dev-contracts/TestnetERC20Token.sol +++ b/l1-contracts/contracts/dev-contracts/TestnetERC20Token.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.24; -import {ERC20} from "@openzeppelin/contracts/token/ERC20/ERC20.sol"; +import {ERC20} from "@openzeppelin/contracts-v4/token/ERC20/ERC20.sol"; contract TestnetERC20Token is ERC20 { // add this to be excluded from coverage report diff --git a/l1-contracts/contracts/dev-contracts/WETH9.sol b/l1-contracts/contracts/dev-contracts/WETH9.sol index e094ba89e..5ab311b13 100644 --- a/l1-contracts/contracts/dev-contracts/WETH9.sol +++ b/l1-contracts/contracts/dev-contracts/WETH9.sol @@ -30,7 +30,17 @@ contract WETH9 { function withdraw(uint256 wad) public { require(balanceOf[msg.sender] >= wad, "weth9, 1"); balanceOf[msg.sender] -= wad; - payable(msg.sender).transfer(wad); + // this is a hack so that zkfoundry works, but we are deploying WETH9 on L2 as well. 
+ // payable(msg.sender).transfer(wad); + bool callSuccess; + address sender = msg.sender; + // Low-level assembly call, to avoid any memory copying (save gas) + assembly { + callSuccess := call(gas(), sender, wad, 0, 0, 0, 0) + } + if (!callSuccess) { + require(false, "Withdraw failed"); + } emit Withdrawal(msg.sender, wad); } diff --git a/l1-contracts/contracts/dev-contracts/test/AdminFacetTest.sol b/l1-contracts/contracts/dev-contracts/test/AdminFacetTest.sol index 614c34bb9..bf5ef724f 100644 --- a/l1-contracts/contracts/dev-contracts/test/AdminFacetTest.sol +++ b/l1-contracts/contracts/dev-contracts/test/AdminFacetTest.sol @@ -8,9 +8,9 @@ contract AdminFacetTest is AdminFacet { // add this to be excluded from coverage report function test() internal virtual {} - constructor() { + constructor(uint256 _l1ChainId) AdminFacet(_l1ChainId) { s.admin = msg.sender; - s.stateTransitionManager = msg.sender; + s.chainTypeManager = msg.sender; } function getPorterAvailability() external view returns (bool) { diff --git a/l1-contracts/contracts/dev-contracts/test/CustomUpgradeTest.sol b/l1-contracts/contracts/dev-contracts/test/CustomUpgradeTest.sol index 7055ce557..13cd4d335 100644 --- a/l1-contracts/contracts/dev-contracts/test/CustomUpgradeTest.sol +++ b/l1-contracts/contracts/dev-contracts/test/CustomUpgradeTest.sol @@ -30,7 +30,7 @@ contract CustomUpgradeTest is BaseZkSyncUpgrade { function upgrade(ProposedUpgrade calldata _proposedUpgrade) public override returns (bytes32) { (uint32 newMinorVersion, bool isPatchOnly) = _setNewProtocolVersion(_proposedUpgrade.newProtocolVersion); _upgradeL1Contract(_proposedUpgrade.l1ContractsUpgradeCalldata); - _upgradeVerifier(_proposedUpgrade.verifier, _proposedUpgrade.verifierParams); + _upgradeVerifiers(_proposedUpgrade.dualVerifier, _proposedUpgrade.plonkVerifier, _proposedUpgrade.fflonkVerifier, _proposedUpgrade.fflonkProofLength, _proposedUpgrade.verifierParams); _setBaseSystemContracts(_proposedUpgrade.bootloaderHash, 
_proposedUpgrade.defaultAccountHash, isPatchOnly); bytes32 txHash; diff --git a/l1-contracts/contracts/dev-contracts/test/DiamondProxyTest.sol b/l1-contracts/contracts/dev-contracts/test/DiamondProxyTest.sol index 212a2b76a..a8ae37582 100644 --- a/l1-contracts/contracts/dev-contracts/test/DiamondProxyTest.sol +++ b/l1-contracts/contracts/dev-contracts/test/DiamondProxyTest.sol @@ -3,9 +3,9 @@ pragma solidity 0.8.24; import {Diamond} from "../../state-transition/libraries/Diamond.sol"; -import {ZkSyncHyperchainBase} from "../../state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol"; +import {ZKChainBase} from "../../state-transition/chain-deps/facets/ZKChainBase.sol"; -contract DiamondProxyTest is ZkSyncHyperchainBase { +contract DiamondProxyTest is ZKChainBase { // add this to be excluded from coverage report function test() internal virtual {} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyAdminFacet.sol b/l1-contracts/contracts/dev-contracts/test/DummyAdminFacet.sol index 0a27a7e1c..82c64c4e8 100644 --- a/l1-contracts/contracts/dev-contracts/test/DummyAdminFacet.sol +++ b/l1-contracts/contracts/dev-contracts/test/DummyAdminFacet.sol @@ -2,9 +2,9 @@ pragma solidity 0.8.24; -import {ZkSyncHyperchainBase} from "../../state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol"; +import {ZKChainBase} from "../../state-transition/chain-deps/facets/ZKChainBase.sol"; -contract DummyAdminFacet is ZkSyncHyperchainBase { +contract DummyAdminFacet is ZKChainBase { // add this to be excluded from coverage report function test() internal virtual {} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyAdminFacetNoOverlap.sol b/l1-contracts/contracts/dev-contracts/test/DummyAdminFacetNoOverlap.sol index 0805e535c..b66c76bf0 100644 --- a/l1-contracts/contracts/dev-contracts/test/DummyAdminFacetNoOverlap.sol +++ b/l1-contracts/contracts/dev-contracts/test/DummyAdminFacetNoOverlap.sol @@ -3,10 +3,12 @@ pragma solidity 0.8.24; import {Diamond} from 
"../../state-transition/libraries/Diamond.sol"; -import {ZkSyncHyperchainBase} from "../../state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol"; +import {ZKChainBase} from "../../state-transition/chain-deps/facets/ZKChainBase.sol"; +import {IL1AssetRouter} from "../../bridge/asset-router/IL1AssetRouter.sol"; +import {DataEncoding} from "../../common/libraries/DataEncoding.sol"; /// selectors do not overlap with normal facet selectors (getName does not count) -contract DummyAdminFacetNoOverlap is ZkSyncHyperchainBase { +contract DummyAdminFacetNoOverlap is ZKChainBase { // add this to be excluded from coverage report function test() internal virtual {} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyBridgehub.sol b/l1-contracts/contracts/dev-contracts/test/DummyBridgehub.sol new file mode 100644 index 000000000..f178fc0ed --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/DummyBridgehub.sol @@ -0,0 +1,50 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {ETH_TOKEN_ADDRESS} from "../../common/Config.sol"; +import {L2_NATIVE_TOKEN_VAULT_ADDR} from "../../common/L2ContractAddresses.sol"; +import {IMessageRoot} from "../../bridgehub/IMessageRoot.sol"; + +import {IGetters} from "../../state-transition/chain-interfaces/IGetters.sol"; + +/// @title DummyBridgehub +/// @notice A test smart contract that allows to set State Transition Manager for a given chain +contract DummyBridgehub { + IMessageRoot public messageRoot; + + address public zkChain; + + address public sharedBridge; + + // add this to be excluded from coverage report + function test() internal virtual {} + + function baseTokenAssetId(uint256) external view returns (bytes32) { + return + keccak256( + abi.encode( + block.chainid, + L2_NATIVE_TOKEN_VAULT_ADDR, + ETH_TOKEN_ADDRESS + // bytes32(uint256(uint160(IGetters(msg.sender).getBaseToken()))) + ) + ); + } + + function setMessageRoot(address _messageRoot) public { + messageRoot = IMessageRoot(_messageRoot); 
+ } + + function setZKChain(uint256, address _zkChain) external { + zkChain = _zkChain; + } + + function getZKChain(uint256) external view returns (address) { + return address(0); + } + + function setSharedBridge(address addr) external { + sharedBridge = addr; + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyBridgehubSetter.sol b/l1-contracts/contracts/dev-contracts/test/DummyBridgehubSetter.sol new file mode 100644 index 000000000..8ae0404e7 --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/DummyBridgehubSetter.sol @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Bridgehub} from "../../bridgehub/Bridgehub.sol"; + +contract DummyBridgehubSetter is Bridgehub { + // add this to be excluded from coverage report + function test() internal virtual {} + + /// @notice Constructor + constructor( + uint256 _l1ChainId, + address _owner, + uint256 _maxNumberOfZKChains + ) Bridgehub(_l1ChainId, _owner, _maxNumberOfZKChains) {} + + function setZKChain(uint256 _chainId, address _zkChain) external { + _registerNewZKChain(_chainId, _zkChain); + } + + function setCTM(uint256 _chainId, address _ctm) external { + chainTypeManager[_chainId] = _ctm; + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManager.sol b/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManager.sol new file mode 100644 index 000000000..20cc25328 --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManager.sol @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {EnumerableMap} from "@openzeppelin/contracts-v4/utils/structs/EnumerableMap.sol"; + +import {ChainTypeManager} from "../../state-transition/ChainTypeManager.sol"; + +/// @title DummyExecutor +/// @notice A test smart contract implementing the IExecutor interface to simulate Executor behavior for testing purposes. 
+contract DummyChainTypeManager is ChainTypeManager { + using EnumerableMap for EnumerableMap.UintToAddressMap; + + // add this to be excluded from coverage report + function test() internal virtual {} + + address zkChain; + + /// @notice Constructor + constructor() ChainTypeManager(address(0)) {} + + function setZKChain(uint256 _chainId, address _zkChain) external { + zkChain = _zkChain; + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManagerForValidatorTimelock.sol b/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManagerForValidatorTimelock.sol similarity index 51% rename from l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManagerForValidatorTimelock.sol rename to l1-contracts/contracts/dev-contracts/test/DummyChainTypeManagerForValidatorTimelock.sol index f2944d3a8..8e876abb2 100644 --- a/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManagerForValidatorTimelock.sol +++ b/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManagerForValidatorTimelock.sol @@ -2,25 +2,29 @@ pragma solidity 0.8.24; -/// @title DummyStateTransitionManagerForValidatorTimelock +/// @title DummyChainTypeManagerForValidatorTimelock /// @notice A test smart contract implementing the IExecutor interface to simulate Executor behavior for testing purposes. 
-contract DummyStateTransitionManagerForValidatorTimelock { +contract DummyChainTypeManagerForValidatorTimelock { // add this to be excluded from coverage report function test() internal virtual {} address public chainAdmin; - address public hyperchainAddress; + address public zkChainAddress; - constructor(address _chainAdmin, address _hyperchain) { + constructor(address _chainAdmin, address _zkChain) { chainAdmin = _chainAdmin; - hyperchainAddress = _hyperchain; + zkChainAddress = _zkChain; } function getChainAdmin(uint256) external view returns (address) { return chainAdmin; } - function getHyperchain(uint256) external view returns (address) { - return hyperchainAddress; + function getZKChain(uint256) external view returns (address) { + return zkChainAddress; + } + + function setZKChain(uint256, address _zkChain) external { + zkChainAddress = _zkChain; } } diff --git a/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManagerWithBridgeHubAddress.sol b/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManagerWithBridgeHubAddress.sol new file mode 100644 index 000000000..9f6acd198 --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/DummyChainTypeManagerWithBridgeHubAddress.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {EnumerableMap} from "@openzeppelin/contracts-v4/utils/structs/EnumerableMap.sol"; + +import {ChainTypeManager} from "../../state-transition/ChainTypeManager.sol"; + +/// @title DummyExecutor +/// @notice A test smart contract implementing the IExecutor interface to simulate Executor behavior for testing purposes. 
+contract DummyChainTypeManagerWBH is ChainTypeManager { + using EnumerableMap for EnumerableMap.UintToAddressMap; + + address zkChain; + /// @notice Constructor + constructor(address bridgeHub) ChainTypeManager(bridgeHub) {} + + function setZKChain(uint256 _chainId, address _zkChain) external { + zkChain = _zkChain; + } + + // add this to be excluded from coverage report + function test() internal {} +} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyEraBaseTokenBridge.sol b/l1-contracts/contracts/dev-contracts/test/DummyEraBaseTokenBridge.sol index 96382c44f..bb450b261 100644 --- a/l1-contracts/contracts/dev-contracts/test/DummyEraBaseTokenBridge.sol +++ b/l1-contracts/contracts/dev-contracts/test/DummyEraBaseTokenBridge.sol @@ -8,7 +8,7 @@ contract DummyEraBaseTokenBridge { function bridgehubDepositBaseToken( uint256 _chainId, - address _prevMsgSender, + address _originalCaller, address _l1Token, uint256 _amount ) external payable {} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyExecutor.sol b/l1-contracts/contracts/dev-contracts/test/DummyExecutor.sol deleted file mode 100644 index 7da7113b2..000000000 --- a/l1-contracts/contracts/dev-contracts/test/DummyExecutor.sol +++ /dev/null @@ -1,153 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {IExecutor} from "../../state-transition/chain-interfaces/IExecutor.sol"; - -/// @title DummyExecutor -/// @notice A test smart contract implementing the IExecutor interface to simulate Executor behavior for testing purposes. 
-contract DummyExecutor is IExecutor { - // add this to be excluded from coverage report - function test() internal virtual {} - - address owner; - - // Flags to control if the contract should revert during commit, prove, and execute batch operations - bool shouldRevertOnCommitBatches; - bool shouldRevertOnProveBatches; - bool shouldRevertOnExecuteBatches; - - // Counters to track the total number of committed, verified, and executed batches - uint256 public getTotalBatchesCommitted; - uint256 public getTotalBatchesVerified; - uint256 public getTotalBatchesExecuted; - string public constant override getName = "DummyExecutor"; - - /// @notice Constructor sets the contract owner to the message sender - constructor() { - owner = msg.sender; - } - - /// @notice Modifier that only allows the owner to call certain functions - modifier onlyOwner() { - require(msg.sender == owner); - _; - } - - function getAdmin() external view returns (address) { - return owner; - } - - /// @notice Removing txs from the priority queue - function removePriorityQueueFront(uint256 _index) external {} - - /// @notice Allows the owner to set whether the contract should revert during commit blocks operation - function setShouldRevertOnCommitBatches(bool _shouldRevert) external onlyOwner { - shouldRevertOnCommitBatches = _shouldRevert; - } - - /// @notice Allows the owner to set whether the contract should revert during prove batches operation - function setShouldRevertOnProveBatches(bool _shouldRevert) external onlyOwner { - shouldRevertOnProveBatches = _shouldRevert; - } - - /// @notice Allows the owner to set whether the contract should revert during execute batches operation - function setShouldRevertOnExecuteBatches(bool _shouldRevert) external onlyOwner { - shouldRevertOnExecuteBatches = _shouldRevert; - } - - function commitBatches( - StoredBatchInfo calldata _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData - ) public { - require(!shouldRevertOnCommitBatches, 
"DummyExecutor: shouldRevertOnCommitBatches"); - require( - _lastCommittedBatchData.batchNumber == getTotalBatchesCommitted, - "DummyExecutor: Invalid last committed batch number" - ); - - uint256 batchesLength = _newBatchesData.length; - for (uint256 i = 0; i < batchesLength; ++i) { - require(getTotalBatchesCommitted + i + 1 == _newBatchesData[i].batchNumber); - } - - getTotalBatchesCommitted += batchesLength; - } - - function commitBatchesSharedBridge( - uint256, - StoredBatchInfo calldata _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData - ) external { - commitBatches(_lastCommittedBatchData, _newBatchesData); - } - - function proveBatches( - StoredBatchInfo calldata _prevBatch, - StoredBatchInfo[] calldata _committedBatches, - ProofInput calldata - ) public { - require(!shouldRevertOnProveBatches, "DummyExecutor: shouldRevertOnProveBatches"); - require(_prevBatch.batchNumber == getTotalBatchesVerified, "DummyExecutor: Invalid previous batch number"); - - require(_committedBatches.length == 1, "DummyExecutor: Can prove only one batch"); - require( - _committedBatches[0].batchNumber == _prevBatch.batchNumber + 1, - "DummyExecutor 1: Can't prove batch out of order" - ); - - getTotalBatchesVerified += 1; - require( - getTotalBatchesVerified <= getTotalBatchesCommitted, - "DummyExecutor: prove more batches than were committed" - ); - } - - function proveBatchesSharedBridge( - uint256, - StoredBatchInfo calldata _prevBatch, - StoredBatchInfo[] calldata _committedBatches, - ProofInput calldata _proof - ) external { - proveBatches(_prevBatch, _committedBatches, _proof); - } - - function executeBatches(StoredBatchInfo[] calldata _batchesData) public { - require(!shouldRevertOnExecuteBatches, "DummyExecutor: shouldRevertOnExecuteBatches"); - uint256 nBatches = _batchesData.length; - for (uint256 i = 0; i < nBatches; ++i) { - require(_batchesData[i].batchNumber == getTotalBatchesExecuted + i + 1); - } - getTotalBatchesExecuted += nBatches; - require( - 
getTotalBatchesExecuted <= getTotalBatchesVerified, - "DummyExecutor 2: Can't execute batches more than committed and proven currently" - ); - } - - function executeBatchesSharedBridge(uint256, StoredBatchInfo[] calldata _batchesData) external { - executeBatches(_batchesData); - } - - function revertBatches(uint256 _newLastBatch) public { - require( - getTotalBatchesCommitted > _newLastBatch, - "DummyExecutor: The last committed batch is less than new last batch" - ); - uint256 newTotalBatchesCommitted = _maxU256(_newLastBatch, getTotalBatchesExecuted); - - if (newTotalBatchesCommitted < getTotalBatchesVerified) { - getTotalBatchesVerified = newTotalBatchesCommitted; - } - getTotalBatchesCommitted = newTotalBatchesCommitted; - } - - function revertBatchesSharedBridge(uint256, uint256 _newLastBatch) external { - revertBatches(_newLastBatch); - } - - /// @notice Returns larger of two values - function _maxU256(uint256 a, uint256 b) internal pure returns (uint256) { - return a < b ? b : a; - } -} diff --git a/l1-contracts/contracts/dev-contracts/test/DummySharedBridge.sol b/l1-contracts/contracts/dev-contracts/test/DummySharedBridge.sol index 6e61902f0..c75ec4530 100644 --- a/l1-contracts/contracts/dev-contracts/test/DummySharedBridge.sol +++ b/l1-contracts/contracts/dev-contracts/test/DummySharedBridge.sol @@ -2,32 +2,47 @@ pragma solidity 0.8.24; -import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; import {L2TransactionRequestTwoBridgesInner} from "../../bridgehub/IBridgehub.sol"; -import {TWO_BRIDGES_MAGIC_VALUE} from "../../common/Config.sol"; +import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/security/PausableUpgradeable.sol"; +import {TWO_BRIDGES_MAGIC_VALUE, ETH_TOKEN_ADDRESS} from "../../common/Config.sol"; +import {IL1NativeTokenVault} from "../../bridge/ntv/L1NativeTokenVault.sol"; +import {L2_NATIVE_TOKEN_VAULT_ADDR} from 
"../../common/L2ContractAddresses.sol"; +import {SafeERC20} from "@openzeppelin/contracts-v4/token/ERC20/utils/SafeERC20.sol"; +import {IL2Bridge} from "../../bridge/interfaces/IL2Bridge.sol"; +import {IL2SharedBridgeLegacy} from "../../bridge/interfaces/IL2SharedBridgeLegacy.sol"; +import {IL2SharedBridgeLegacyFunctions} from "../../bridge/interfaces/IL2SharedBridgeLegacyFunctions.sol"; + +contract DummySharedBridge is PausableUpgradeable { + using SafeERC20 for IERC20; + + IL1NativeTokenVault public nativeTokenVault; -contract DummySharedBridge { event BridgehubDepositBaseTokenInitiated( uint256 indexed chainId, address indexed from, - address l1Token, + bytes32 assetId, uint256 amount ); bytes32 dummyL2DepositTxHash; - /// @dev Maps token balances for each chain to prevent unauthorized spending across hyperchains. + /// @dev Maps token balances for each chain to prevent unauthorized spending across zkChains. /// This serves as a security measure until hyperbridging is implemented. - mapping(uint256 chainId => mapping(address l1Token => uint256 balance)) internal chainBalance; + mapping(uint256 chainId => mapping(address l1Token => uint256 balance)) public chainBalance; /// @dev Indicates whether the hyperbridging is enabled for a given chain. - mapping(uint256 chainId => bool enabled) internal hyperbridgingEnabled; address l1ReceiverReturnInFinalizeWithdrawal; address l1TokenReturnInFinalizeWithdrawal; uint256 amountReturnInFinalizeWithdrawal; + /// @dev A mapping assetId => assetHandlerAddress + /// @dev Tracks the address of Asset Handler contracts, where bridged funds are locked for each asset + /// @dev P.S. 
this liquidity was locked directly in SharedBridge before + mapping(bytes32 assetId => address assetHandlerAddress) public assetHandlerAddress; + constructor(bytes32 _dummyL2DepositTxHash) { dummyL2DepositTxHash = _dummyL2DepositTxHash; } @@ -38,6 +53,8 @@ contract DummySharedBridge { amountReturnInFinalizeWithdrawal = _amount; } + function receiveEth(uint256 _chainId) external payable {} + function depositLegacyErc20Bridge( address, //_msgSender, address, //_l2Receiver, @@ -61,6 +78,18 @@ contract DummySharedBridge { bytes32[] calldata // _merkleProof ) external {} + function claimFailedDeposit( + uint256, // _chainId, + address, // _depositSender, + address, // _l1Asset, + uint256, // _amount, + bytes32, // _l2TxHash, + uint256, // _l2BatchNumber, + uint256, // _l2MessageIndex, + uint16, // _l2TxNumberInBatch, + bytes32[] calldata //_merkleProof + ) external {} + function finalizeWithdrawalLegacyErc20Bridge( uint256, //_l2BatchNumber, uint256, //_l2MessageIndex, @@ -75,28 +104,58 @@ contract DummySharedBridge { event Debugger(uint256); - function bridgehubDepositBaseToken( + function pause() external { + _pause(); + } + + function unpause() external { + _unpause(); + } + + // This function expects abi encoded data + function _parseL2WithdrawalMessage( + bytes memory _l2ToL1message + ) internal view returns (address l1Receiver, address l1Token, uint256 amount) { + (l1Receiver, l1Token, amount) = abi.decode(_l2ToL1message, (address, address, uint256)); + } + + // simple function to just transfer the funds + function finalizeWithdrawal( uint256 _chainId, - address _prevMsgSender, - address _l1Token, - uint256 _amount - ) external payable { - if (_l1Token == address(1)) { - require(msg.value == _amount, "L1SharedBridge: msg.value not equal to amount"); + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes calldata _message, + bytes32[] calldata _merkleProof + ) external returns (address l1Receiver, address l1Token, uint256 amount) 
{ + (l1Receiver, l1Token, amount) = _parseL2WithdrawalMessage(_message); + + if (l1Token == address(1)) { + bool callSuccess; + // Low-level assembly call, to avoid any memory copying (save gas) + assembly { + callSuccess := call(gas(), l1Receiver, amount, 0, 0, 0, 0) + } + require(callSuccess, "ShB: withdraw failed"); } else { - // The Bridgehub also checks this, but we want to be sure - require(msg.value == 0, "ShB m.v > 0 b d.it"); - uint256 amount = _depositFunds(_prevMsgSender, IERC20(_l1Token), _amount); // note if _prevMsgSender is this contract, this will return 0. This does not happen. - require(amount == _amount, "3T"); // The token has non-standard transfer logic + // Withdraw funds + IERC20(l1Token).safeTransfer(l1Receiver, amount); } + } - if (!hyperbridgingEnabled[_chainId]) { - chainBalance[_chainId][_l1Token] += _amount; - } + function bridgehubDepositBaseToken( + uint256 _chainId, + bytes32 _assetId, + address _originalCaller, + uint256 _amount + ) external payable whenNotPaused { + // Dummy bridge supports only working with ETH for simplicity. 
+ require(msg.value == _amount, "L1AR: msg.value not equal to amount"); + + chainBalance[_chainId][address(1)] += _amount; - emit Debugger(5); // Note that we don't save the deposited amount, as this is for the base token, which gets sent to the refundRecipient if the tx fails - emit BridgehubDepositBaseTokenInitiated(_chainId, _prevMsgSender, _l1Token, _amount); + emit BridgehubDepositBaseTokenInitiated(_chainId, _originalCaller, _assetId, _amount); } function _depositFunds(address _from, IERC20 _token, uint256 _amount) internal returns (uint256) { @@ -108,14 +167,33 @@ contract DummySharedBridge { } function bridgehubDeposit( - uint256, //_chainId, - address, //_prevMsgSender, - uint256, // l2Value, needed for Weth deposits in the future - bytes calldata //_data + uint256, + address _originalCaller, + uint256, + bytes calldata _data ) external payable returns (L2TransactionRequestTwoBridgesInner memory request) { - // Request the finalization of the deposit on the L2 side - bytes memory l2TxCalldata = bytes("0xabcd123"); - bytes32 txDataHash = bytes32("0x1212121212abf"); + (address _l1Token, uint256 _depositAmount, address _l2Receiver) = abi.decode( + _data, + (address, uint256, address) + ); + uint256 amount; + + if (_l1Token == ETH_TOKEN_ADDRESS) { + amount = msg.value; + require(_depositAmount == 0, "ShB wrong withdraw amount"); + } else { + require(msg.value == 0, "ShB m.v > 0 for BH d.it 2"); + amount = _depositAmount; + + uint256 withdrawAmount = _depositFunds(_originalCaller, IERC20(_l1Token), _depositAmount); + require(withdrawAmount == _depositAmount, "5T"); // The token has non-standard transfer logic + } + + bytes memory l2TxCalldata = abi.encodeCall( + IL2SharedBridgeLegacyFunctions.finalizeDeposit, + (_originalCaller, _l2Receiver, _l1Token, amount, new bytes(0)) + ); + bytes32 txDataHash = keccak256(abi.encode(_originalCaller, _l1Token, amount)); request = L2TransactionRequestTwoBridgesInner({ magicValue: TWO_BRIDGES_MAGIC_VALUE, @@ -127,4 +205,23 @@ 
contract DummySharedBridge { } function bridgehubConfirmL2Transaction(uint256 _chainId, bytes32 _txDataHash, bytes32 _txHash) external {} + + /// @dev Sets the native token vault contract address. Should be called only once. + function setNativeTokenVault(IL1NativeTokenVault _nativeTokenVault) external { + require(address(nativeTokenVault) == address(0), "L1AR: legacy bridge already set"); + require(address(_nativeTokenVault) != address(0), "L1AR: legacy bridge 0"); + nativeTokenVault = _nativeTokenVault; + } + + /// @dev Used to set the asset handler address for a given assetId. + function setAssetHandlerAddressThisChain(bytes32 _additionalData, address _assetHandlerAddress) external { + address sender = msg.sender == address(nativeTokenVault) ? L2_NATIVE_TOKEN_VAULT_ADDR : msg.sender; + bytes32 assetId = keccak256(abi.encode(uint256(block.chainid), sender, _additionalData)); + assetHandlerAddress[assetId] = _assetHandlerAddress; + // assetDeploymentTracker[assetId] = sender; + // emit AssetHandlerRegisteredInitial(assetId, _assetHandlerAddress, _additionalData, sender); + } + + // add this to be excluded from coverage report + function test() internal {} } diff --git a/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManager.sol b/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManager.sol deleted file mode 100644 index b13050318..000000000 --- a/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManager.sol +++ /dev/null @@ -1,23 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {EnumerableMap} from "@openzeppelin/contracts/utils/structs/EnumerableMap.sol"; - -import {StateTransitionManager} from "../../state-transition/StateTransitionManager.sol"; - -/// @title DummyExecutor -/// @notice A test smart contract implementing the IExecutor interface to simulate Executor behavior for testing purposes. 
-contract DummyStateTransitionManager is StateTransitionManager { - using EnumerableMap for EnumerableMap.UintToAddressMap; - - // add this to be excluded from coverage report - function test() internal virtual {} - - /// @notice Constructor - constructor() StateTransitionManager(address(0), type(uint256).max) {} - - function setHyperchain(uint256 _chainId, address _hyperchain) external { - hyperchainMap.set(_chainId, _hyperchain); - } -} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManagerWithBridgeHubAddress.sol b/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManagerWithBridgeHubAddress.sol deleted file mode 100644 index 883d74ca1..000000000 --- a/l1-contracts/contracts/dev-contracts/test/DummyStateTransitionManagerWithBridgeHubAddress.sol +++ /dev/null @@ -1,20 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {EnumerableMap} from "@openzeppelin/contracts/utils/structs/EnumerableMap.sol"; - -import {StateTransitionManager} from "../../state-transition/StateTransitionManager.sol"; - -/// @title DummyExecutor -/// @notice A test smart contract implementing the IExecutor interface to simulate Executor behavior for testing purposes. 
-contract DummyStateTransitionManagerWBH is StateTransitionManager { - using EnumerableMap for EnumerableMap.UintToAddressMap; - - /// @notice Constructor - constructor(address bridgeHub) StateTransitionManager(bridgeHub, type(uint256).max) {} - - function setHyperchain(uint256 _chainId, address _hyperchain) external { - hyperchainMap.set(_chainId, _hyperchain); - } -} diff --git a/l1-contracts/contracts/dev-contracts/test/DummyHyperchain.sol b/l1-contracts/contracts/dev-contracts/test/DummyZKChain.sol similarity index 69% rename from l1-contracts/contracts/dev-contracts/test/DummyHyperchain.sol rename to l1-contracts/contracts/dev-contracts/test/DummyZKChain.sol index b626142a7..9a535affe 100644 --- a/l1-contracts/contracts/dev-contracts/test/DummyHyperchain.sol +++ b/l1-contracts/contracts/dev-contracts/test/DummyZKChain.sol @@ -2,13 +2,21 @@ pragma solidity 0.8.24; import {MailboxFacet} from "../../state-transition/chain-deps/facets/Mailbox.sol"; -import {FeeParams, PubdataPricingMode} from "../../state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; +import {FeeParams, PubdataPricingMode} from "../../state-transition/chain-deps/ZKChainStorage.sol"; -contract DummyHyperchain is MailboxFacet { - constructor(address bridgeHubAddress, uint256 _eraChainId) MailboxFacet(_eraChainId) { +contract DummyZKChain is MailboxFacet { + constructor( + address bridgeHubAddress, + uint256 _eraChainId, + uint256 _l1ChainId + ) MailboxFacet(_eraChainId, _l1ChainId) { s.bridgehub = bridgeHubAddress; } + function getEraChainId() public view returns (uint256) { + return ERA_CHAIN_ID; + } + function setBridgeHubAddress(address bridgeHubAddress) public { s.bridgehub = bridgeHubAddress; } @@ -39,4 +47,13 @@ contract DummyHyperchain is MailboxFacet { minimalL2GasPrice: 250_000_000 }); } + + function genesisUpgrade( + address _l1GenesisUpgrade, + bytes calldata _forceDeploymentData, + bytes[] calldata _factoryDeps + ) external {} + + // add this to be excluded from coverage report + 
function test() internal {} } diff --git a/l1-contracts/contracts/dev-contracts/test/ExecutorProvingTest.sol b/l1-contracts/contracts/dev-contracts/test/ExecutorProvingTest.sol index 50bccb744..5794dfbe6 100644 --- a/l1-contracts/contracts/dev-contracts/test/ExecutorProvingTest.sol +++ b/l1-contracts/contracts/dev-contracts/test/ExecutorProvingTest.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {ExecutorFacet} from "../../state-transition/chain-deps/facets/Executor.sol"; -import {PubdataPricingMode} from "../../state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; +import {PubdataPricingMode} from "../../state-transition/chain-deps/ZKChainStorage.sol"; import {LogProcessingOutput} from "../../state-transition/chain-interfaces/IExecutor.sol"; import {LogProcessingOutput} from "../../state-transition/chain-interfaces/IExecutor.sol"; @@ -28,7 +28,7 @@ contract ExecutorProvingTest is ExecutorFacet { CommitBatchInfo calldata _newBatch, bytes32 _expectedSystemContractUpgradeTxHash, PubdataPricingMode - ) external pure returns (LogProcessingOutput memory logOutput) { + ) external view returns (LogProcessingOutput memory logOutput) { return _processL2Logs(_newBatch, _expectedSystemContractUpgradeTxHash); } @@ -38,4 +38,7 @@ contract ExecutorProvingTest is ExecutorFacet { s.l2BootloaderBytecodeHash = l2BootloaderBytecodeHash; s.zkPorterIsAvailable = false; } + + // add this to be excluded from coverage report + function test() internal {} } diff --git a/l1-contracts/contracts/dev-contracts/test/FullMerkleTest.sol b/l1-contracts/contracts/dev-contracts/test/FullMerkleTest.sol new file mode 100644 index 000000000..0a7245800 --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/FullMerkleTest.sol @@ -0,0 +1,55 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {FullMerkle} from "../../common/libraries/FullMerkle.sol"; + +contract FullMerkleTest { + using FullMerkle for FullMerkle.FullTree; + + FullMerkle.FullTree internal tree; + + 
constructor(bytes32 zero) { + tree.setup(zero); + } + + function pushNewLeaf(bytes32 _item) external { + tree.pushNewLeaf(_item); + } + + function updateLeaf(uint256 _index, bytes32 _item) external { + tree.updateLeaf(_index, _item); + } + + function updateAllLeaves(bytes32[] memory _items) external { + tree.updateAllLeaves(_items); + } + + function updateAllNodesAtHeight(uint256 _height, bytes32[] memory _items) external { + tree.updateAllNodesAtHeight(_height, _items); + } + + function root() external view returns (bytes32) { + return tree.root(); + } + + function height() external view returns (uint256) { + return tree._height; + } + + function index() external view returns (uint256) { + return tree._leafNumber; + } + + function node(uint256 _height, uint256 _index) external view returns (bytes32) { + return tree._nodes[_height][_index]; + } + + function nodeCount(uint256 _height) external view returns (uint256) { + return tree._nodes[_height].length; + } + + function zeros(uint256 _index) external view returns (bytes32) { + return tree._zeros[_index]; + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/IncrementalMerkleTest.sol b/l1-contracts/contracts/dev-contracts/test/IncrementalMerkleTest.sol new file mode 100644 index 000000000..b5850bb42 --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/IncrementalMerkleTest.sol @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {DynamicIncrementalMerkle} from "../../common/libraries/DynamicIncrementalMerkle.sol"; + +contract IncrementalMerkleTest { + using DynamicIncrementalMerkle for DynamicIncrementalMerkle.Bytes32PushTree; + + DynamicIncrementalMerkle.Bytes32PushTree internal tree; + + constructor(bytes32 zero) { + tree.setup(zero); + } + + function push(bytes32 _item) external { + tree.push(_item); + } + + function root() external view returns (bytes32) { + return tree.root(); + } + + function height() external view returns (uint256) { + return tree.height(); 
+ } + + function index() external view returns (uint256) { + return tree._nextLeafIndex; + } + + function side(uint256 _index) external view returns (bytes32) { + return tree._sides[_index]; + } + + function zeros(uint256 _index) external view returns (bytes32) { + return tree._zeros[_index]; + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/L1ERC20BridgeTest.sol b/l1-contracts/contracts/dev-contracts/test/L1ERC20BridgeTest.sol index 034fb33d9..2f8eda079 100644 --- a/l1-contracts/contracts/dev-contracts/test/L1ERC20BridgeTest.sol +++ b/l1-contracts/contracts/dev-contracts/test/L1ERC20BridgeTest.sol @@ -3,12 +3,17 @@ pragma solidity 0.8.24; import {L1ERC20Bridge} from "../../bridge/L1ERC20Bridge.sol"; -import {IBridgehub, IL1SharedBridge} from "../../bridge/interfaces/IL1SharedBridge.sol"; +import {IL1NativeTokenVault} from "../../bridge/ntv/IL1NativeTokenVault.sol"; +import {IBridgehub} from "../../bridgehub/IBridgehub.sol"; +import {IL1AssetRouter} from "../../bridge/asset-router/IL1AssetRouter.sol"; +import {IL1Nullifier} from "../../bridge/interfaces/IL1Nullifier.sol"; /// @author Matter Labs contract L1ERC20BridgeTest is L1ERC20Bridge { // add this to be excluded from coverage report function test() internal virtual {} - constructor(IBridgehub _zkSync) L1ERC20Bridge(IL1SharedBridge(address(0))) {} + constructor( + IBridgehub _zkSync + ) L1ERC20Bridge(IL1Nullifier(address(0)), IL1AssetRouter(address(0)), IL1NativeTokenVault(address(0)), 1) {} } diff --git a/l1-contracts/contracts/dev-contracts/test/L2NativeTokenVaultDev.sol b/l1-contracts/contracts/dev-contracts/test/L2NativeTokenVaultDev.sol new file mode 100644 index 000000000..fc1991503 --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/L2NativeTokenVaultDev.sol @@ -0,0 +1,78 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; +import {Create2} from 
"@openzeppelin/contracts-v4/utils/Create2.sol"; +import {IBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/IBeacon.sol"; +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; + +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {NativeTokenVault} from "contracts/bridge/ntv/NativeTokenVault.sol"; +import {L2NativeTokenVault} from "contracts/bridge/ntv/L2NativeTokenVault.sol"; +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; + +/// @author Matter Labs +/// @notice This is used for fast debugging of the L2NTV by running it in L1 context, i.e. normal foundry instead of foundry --zksync. +contract L2NativeTokenVaultDev is L2NativeTokenVault { + constructor( + uint256 _l1ChainId, + address _aliasedOwner, + bytes32 _l2TokenProxyBytecodeHash, + address _legacySharedBridge, + address _bridgedTokenBeacon, + bool _contractsDeployedAlready, + address _wethToken, + bytes32 _baseTokenAssetId + ) + L2NativeTokenVault( + _l1ChainId, + _aliasedOwner, + _l2TokenProxyBytecodeHash, + _legacySharedBridge, + _bridgedTokenBeacon, + _contractsDeployedAlready, + _wethToken, + _baseTokenAssetId + ) + {} + + /// @notice copied from L1NTV for L1 compilation + function calculateCreate2TokenAddress( + uint256 _originChainId, + address _l1Token + ) public view override(L2NativeTokenVault) returns (address) { + bytes32 salt = _getCreate2Salt(_originChainId, _l1Token); + return + Create2.computeAddress( + salt, + keccak256(abi.encodePacked(type(BeaconProxy).creationCode, abi.encode(bridgedTokenBeacon, ""))) + ); + } + + function deployBridgedStandardERC20(address _owner) external { + _transferOwnership(_owner); + + address l2StandardToken = address(new BridgedStandardERC20{salt: bytes32(0)}()); + + UpgradeableBeacon tokenBeacon = new UpgradeableBeacon{salt: bytes32(0)}(l2StandardToken); + + tokenBeacon.transferOwnership(owner()); + bridgedTokenBeacon = IBeacon(address(tokenBeacon)); 
+ emit L2TokenBeaconUpdated(address(bridgedTokenBeacon), l2TokenProxyBytecodeHash); + } + + function test() external pure { + // test + } + + function _deployBeaconProxy(bytes32 _salt) internal virtual override returns (BeaconProxy proxy) { + // Use CREATE2 to deploy the BeaconProxy + address proxyAddress = Create2.deploy( + 0, + _salt, + abi.encodePacked(type(BeaconProxy).creationCode, abi.encode(bridgedTokenBeacon, "")) + ); + return BeaconProxy(payable(proxyAddress)); + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/MailboxFacetTest.sol b/l1-contracts/contracts/dev-contracts/test/MailboxFacetTest.sol index d5a415510..5b132f64c 100644 --- a/l1-contracts/contracts/dev-contracts/test/MailboxFacetTest.sol +++ b/l1-contracts/contracts/dev-contracts/test/MailboxFacetTest.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.24; -import {FeeParams} from "../../state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; +import {FeeParams} from "../../state-transition/chain-deps/ZKChainStorage.sol"; import {MailboxFacet} from "../../state-transition/chain-deps/facets/Mailbox.sol"; import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "../../common/Config.sol"; @@ -10,7 +10,7 @@ contract MailboxFacetTest is MailboxFacet { // add this to be excluded from coverage report function test() internal virtual {} - constructor(uint256 _eraChainId) MailboxFacet(_eraChainId) { + constructor(uint256 _eraChainId, uint256 _l1ChainId) MailboxFacet(_eraChainId, _l1ChainId) { s.admin = msg.sender; } diff --git a/l1-contracts/contracts/dev-contracts/test/MerkleTest.sol b/l1-contracts/contracts/dev-contracts/test/MerkleTest.sol index 7db97f8be..f270ba30e 100644 --- a/l1-contracts/contracts/dev-contracts/test/MerkleTest.sol +++ b/l1-contracts/contracts/dev-contracts/test/MerkleTest.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.24; -import {Merkle} from "../../state-transition/libraries/Merkle.sol"; +import {Merkle} from "../../common/libraries/Merkle.sol"; contract MerkleTest { function calculateRoot( @@ 
-12,4 +12,13 @@ contract MerkleTest { ) external pure returns (bytes32) { return Merkle.calculateRoot(_path, _index, _itemHash); } + + function calculateRoot( + bytes32[] calldata _startPath, + bytes32[] calldata _endPath, + uint256 _startIndex, + bytes32[] calldata _itemHashes + ) external pure returns (bytes32) { + return Merkle.calculateRootPaths(_startPath, _endPath, _startIndex, _itemHashes); + } } diff --git a/l1-contracts/contracts/dev-contracts/test/MockExecutor.sol b/l1-contracts/contracts/dev-contracts/test/MockExecutor.sol index 9c7878b77..954c32ca2 100644 --- a/l1-contracts/contracts/dev-contracts/test/MockExecutor.sol +++ b/l1-contracts/contracts/dev-contracts/test/MockExecutor.sol @@ -2,9 +2,9 @@ pragma solidity 0.8.24; -import {ZkSyncHyperchainBase} from "../../state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol"; +import {ZKChainBase} from "../../state-transition/chain-deps/facets/ZKChainBase.sol"; -contract MockExecutorFacet is ZkSyncHyperchainBase { +contract MockExecutorFacet is ZKChainBase { // add this to be excluded from coverage report function test() internal virtual {} @@ -12,4 +12,10 @@ contract MockExecutorFacet is ZkSyncHyperchainBase { s.totalBatchesExecuted = _batchNumber; s.l2LogsRootHashes[_batchNumber] = _l2LogsTreeRoot; } + + function setExecutedBatches(uint256 _batchNumber) external { + s.totalBatchesExecuted = _batchNumber; + s.totalBatchesCommitted = _batchNumber; + s.totalBatchesVerified = _batchNumber; + } } diff --git a/l1-contracts/contracts/dev-contracts/test/PriorityTreeTest.sol b/l1-contracts/contracts/dev-contracts/test/PriorityTreeTest.sol new file mode 100644 index 000000000..0409c1a4c --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/PriorityTreeTest.sol @@ -0,0 +1,49 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {PriorityTree, PriorityOpsBatchInfo, PriorityTreeCommitment} from "../../state-transition/libraries/PriorityTree.sol"; + +contract PriorityTreeTest { + 
PriorityTree.Tree priorityTree; + + constructor() { + PriorityTree.setup(priorityTree, 0); + } + + function getFirstUnprocessedPriorityTx() external view returns (uint256) { + return PriorityTree.getFirstUnprocessedPriorityTx(priorityTree); + } + + function getTotalPriorityTxs() external view returns (uint256) { + return PriorityTree.getTotalPriorityTxs(priorityTree); + } + + function getSize() external view returns (uint256) { + return PriorityTree.getSize(priorityTree); + } + + function push(bytes32 _hash) external { + return PriorityTree.push(priorityTree, _hash); + } + + function getRoot() external view returns (bytes32) { + return PriorityTree.getRoot(priorityTree); + } + + function processBatch(PriorityOpsBatchInfo calldata _priorityOpsData) external { + PriorityTree.processBatch(priorityTree, _priorityOpsData); + } + + function getCommitment() external view returns (PriorityTreeCommitment memory) { + return PriorityTree.getCommitment(priorityTree); + } + + function initFromCommitment(PriorityTreeCommitment calldata _commitment) external { + PriorityTree.initFromCommitment(priorityTree, _commitment); + } + + function getZero() external view returns (bytes32) { + return priorityTree.tree._zeros[0]; + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/ReenterGovernance.sol b/l1-contracts/contracts/dev-contracts/test/ReenterGovernance.sol index 0d619c5ba..193f8085f 100644 --- a/l1-contracts/contracts/dev-contracts/test/ReenterGovernance.sol +++ b/l1-contracts/contracts/dev-contracts/test/ReenterGovernance.sol @@ -3,6 +3,7 @@ pragma solidity 0.8.24; import {IGovernance} from "../../governance/IGovernance.sol"; +import {Call} from "../../governance/Common.sol"; contract ReenterGovernance { // add this to be excluded from coverage report @@ -12,7 +13,7 @@ contract ReenterGovernance { // Store call, predecessor and salt separately, // because Operation struct can't be stored on storage. 
- IGovernance.Call call; + Call call; bytes32 predecessor; bytes32 salt; @@ -45,7 +46,7 @@ contract ReenterGovernance { fallback() external payable { if (!alreadyReentered) { alreadyReentered = true; - IGovernance.Call[] memory calls = new IGovernance.Call[](1); + Call[] memory calls = new Call[](1); calls[0] = call; IGovernance.Operation memory op = IGovernance.Operation({ calls: calls, diff --git a/l1-contracts/contracts/dev-contracts/test/TestCalldataDA.sol b/l1-contracts/contracts/dev-contracts/test/TestCalldataDA.sol new file mode 100644 index 000000000..aff7e50ca --- /dev/null +++ b/l1-contracts/contracts/dev-contracts/test/TestCalldataDA.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {CalldataDA} from "../../state-transition/data-availability/CalldataDA.sol"; + +contract TestCalldataDA is CalldataDA { + function processL2RollupDAValidatorOutputHash( + bytes32 _l2DAValidatorOutputHash, + uint256 _maxBlobsSupported, + bytes calldata _operatorDAInput + ) + external + pure + returns ( + bytes32 stateDiffHash, + bytes32 fullPubdataHash, + bytes32[] memory blobsLinearHashes, + uint256 blobsProvided, + bytes calldata l1DaInput + ) + { + return _processL2RollupDAValidatorOutputHash(_l2DAValidatorOutputHash, _maxBlobsSupported, _operatorDAInput); + } + + function processCalldataDA( + uint256 _blobsProvided, + bytes32 _fullPubdataHash, + uint256 _maxBlobsSupported, + bytes calldata _pubdataInput + ) external pure returns (bytes32[] memory blobCommitments, bytes calldata _pubdata) { + return _processCalldataDA(_blobsProvided, _fullPubdataHash, _maxBlobsSupported, _pubdataInput); + } +} diff --git a/l1-contracts/contracts/dev-contracts/test/TestExecutor.sol b/l1-contracts/contracts/dev-contracts/test/TestExecutor.sol index 8da6425b3..10b907aec 100644 --- a/l1-contracts/contracts/dev-contracts/test/TestExecutor.sol +++ b/l1-contracts/contracts/dev-contracts/test/TestExecutor.sol @@ -5,10 +5,14 @@ import {ExecutorFacet} from 
"../../state-transition/chain-deps/facets/Executor.s pragma solidity 0.8.24; contract TestExecutor is ExecutorFacet { - /// @dev Since we want to test the blob functionality we want mock the calls to the blobhash opcode. - function _getBlobVersionedHash(uint256 _index) internal view virtual override returns (bytes32 versionedHash) { - (bool success, bytes memory data) = s.blobVersionedHashRetriever.staticcall(abi.encode(_index)); - require(success, "vc"); - versionedHash = abi.decode(data, (bytes32)); + function setPriorityTreeStartIndex(uint256 _startIndex) external { + s.priorityTree.startIndex = _startIndex; } + + // /// @dev Since we want to test the blob functionality we want mock the calls to the blobhash opcode. + // function _getBlobVersionedHash(uint256 _index) internal view virtual override returns (bytes32 versionedHash) { + // (bool success, bytes memory data) = s.blobVersionedHashRetriever.staticcall(abi.encode(_index)); + // require(success, "vc"); + // versionedHash = abi.decode(data, (bytes32)); + // } } diff --git a/l1-contracts/contracts/governance/AccessControlRestriction.sol b/l1-contracts/contracts/governance/AccessControlRestriction.sol new file mode 100644 index 000000000..3fc67f875 --- /dev/null +++ b/l1-contracts/contracts/governance/AccessControlRestriction.sol @@ -0,0 +1,72 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {AccessToFallbackDenied, AccessToFunctionDenied} from "../common/L1ContractErrors.sol"; +import {IAccessControlRestriction} from "./IAccessControlRestriction.sol"; +import {AccessControlDefaultAdminRules} from "@openzeppelin/contracts-v4/access/AccessControlDefaultAdminRules.sol"; +import {IRestriction} from "./IRestriction.sol"; +import {Call} from "./Common.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice The Restriction that is designed to provide the access control logic for the `ChainAdmin` contract. 
+/// @dev It inherits from `AccessControlDefaultAdminRules` without overriding `_setRoleAdmin` functionality. In other +/// words, the `DEFAULT_ADMIN_ROLE` is the only role that can manage roles. This is done for simplicity. +/// @dev An instance of this restriction should be deployed separately for each `ChainAdmin` contract. +/// @dev IMPORTANT: this restriction does not validate the ability of the invoker to use `msg.value`. Thus, +/// either all callers with access to functions should be trusted to not steal ETH from the `ChainAdmin` account +/// or no ETH should be passively stored in `ChainAdmin` account. +contract AccessControlRestriction is IRestriction, IAccessControlRestriction, AccessControlDefaultAdminRules { + /// @notice Required roles to call specific functions. + /// @dev Note, that the role 0 means the `DEFAULT_ADMIN_ROLE` from the `AccessControlDefaultAdminRules` contract. + mapping(address target => mapping(bytes4 selector => bytes32 requiredRole)) public requiredRoles; + + /// @notice Required roles to call a fallback function. + mapping(address target => bytes32 requiredRole) public requiredRolesForFallback; + + constructor( + uint48 initialDelay, + address initialDefaultAdmin + ) AccessControlDefaultAdminRules(initialDelay, initialDefaultAdmin) {} + + /// @notice Sets the required role for a specific function call. + /// @param _target The address of the contract. + /// @param _selector The selector of the function. + /// @param _requiredRole The required role. + function setRequiredRoleForCall( + address _target, + bytes4 _selector, + bytes32 _requiredRole + ) external onlyRole(DEFAULT_ADMIN_ROLE) { + requiredRoles[_target][_selector] = _requiredRole; + + emit RoleSet(_target, _selector, _requiredRole); + } + + /// @notice Sets the required role for a fallback function call. + /// @param _target The address of the contract. + /// @param _requiredRole The required role.
+ function setRequiredRoleForFallback(address _target, bytes32 _requiredRole) external onlyRole(DEFAULT_ADMIN_ROLE) { + requiredRolesForFallback[_target] = _requiredRole; + + emit FallbackRoleSet(_target, _requiredRole); + } + + /// @inheritdoc IRestriction + function validateCall(Call calldata _call, address _invoker) external view { + // Note, that since `DEFAULT_ADMIN_ROLE` is 0 and the default storage value for the + // `requiredRoles` and `requiredRolesForFallback` is 0, the default admin is by default a required + // role for all the functions. + if (_call.data.length < 4) { + if (!hasRole(requiredRolesForFallback[_call.target], _invoker)) { + revert AccessToFallbackDenied(_call.target, _invoker); + } + } else { + bytes4 selector = bytes4(_call.data[:4]); + if (!hasRole(requiredRoles[_call.target][selector], _invoker)) { + revert AccessToFunctionDenied(_call.target, selector, _invoker); + } + } + } +} diff --git a/l1-contracts/contracts/governance/ChainAdmin.sol b/l1-contracts/contracts/governance/ChainAdmin.sol index 874255d38..f6a93146f 100644 --- a/l1-contracts/contracts/governance/ChainAdmin.sol +++ b/l1-contracts/contracts/governance/ChainAdmin.sol @@ -2,45 +2,76 @@ pragma solidity 0.8.24; -import {Ownable2Step} from "@openzeppelin/contracts/access/Ownable2Step.sol"; +// solhint-disable gas-length-in-loops + +import {NoCallsProvided, OnlySelfAllowed, RestrictionWasNotPresent, RestrictionWasAlreadyPresent} from "../common/L1ContractErrors.sol"; import {IChainAdmin} from "./IChainAdmin.sol"; -import {IAdmin} from "../state-transition/chain-interfaces/IAdmin.sol"; +import {IRestriction} from "./IRestriction.sol"; +import {Call} from "./Common.sol"; + +import {EnumerableSet} from "@openzeppelin/contracts-v4/utils/structs/EnumerableSet.sol"; +import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev /// @notice The contract is designed to hold the `admin` role in ZKSync Chain 
(State Transition) contracts. /// The owner of the contract can perform any external calls and also save the information needed for -/// the blockchain node to accept the protocol upgrade. Another role - `tokenMultiplierSetter` can be used in the contract -/// to change the base token gas price in the Chain contract. -contract ChainAdmin is IChainAdmin, Ownable2Step { +/// the blockchain node to accept the protocol upgrade. +contract ChainAdmin is IChainAdmin, ReentrancyGuard { + using EnumerableSet for EnumerableSet.AddressSet; + + /// @notice Ensures that only the `ChainAdmin` contract itself can call the function. + /// @dev All functions that require access-control should use `onlySelf` modifier, while the access control logic + /// should be implemented in the restriction contracts. + modifier onlySelf() { + if (msg.sender != address(this)) { + revert OnlySelfAllowed(); + } + _; + } + + constructor(address[] memory _initialRestrictions) reentrancyGuardInitializer { + unchecked { + for (uint256 i = 0; i < _initialRestrictions.length; ++i) { + _addRestriction(_initialRestrictions[i]); + } + } + } + /// @notice Mapping of protocol versions to their expected upgrade timestamps. /// @dev Needed for the offchain node administration to know when to start building batches with the new protocol version. mapping(uint256 protocolVersion => uint256 upgradeTimestamp) public protocolVersionToUpgradeTimestamp; - /// @notice The address which can call `setTokenMultiplier` function to change the base token gas price in the Chain contract. - /// @dev The token base price can be changed quite often, so the private key for this role is supposed to be stored in the node - /// and used by the automated service in a way similar to the sequencer workflow. 
- address public tokenMultiplierSetter; - - constructor(address _initialOwner, address _initialTokenMultiplierSetter) { - require(_initialOwner != address(0), "Initial owner should be non zero address"); - _transferOwnership(_initialOwner); - // Can be zero if no one has this permission. - tokenMultiplierSetter = _initialTokenMultiplierSetter; - emit NewTokenMultiplierSetter(address(0), _initialTokenMultiplierSetter); + /// @notice The set of active restrictions. + EnumerableSet.AddressSet internal activeRestrictions; + + /// @notice Returns the list of active restrictions. + function getRestrictions() public view returns (address[] memory) { + return activeRestrictions.values(); + } + + /// @inheritdoc IChainAdmin + function isRestrictionActive(address _restriction) external view returns (bool) { + return activeRestrictions.contains(_restriction); + } + + /// @inheritdoc IChainAdmin + function addRestriction(address _restriction) external onlySelf { + _addRestriction(_restriction); } - /// @notice Updates the address responsible for setting token multipliers on the Chain contract . - /// @param _tokenMultiplierSetter The new address to be set as the token multiplier setter. - function setTokenMultiplierSetter(address _tokenMultiplierSetter) external onlyOwner { - emit NewTokenMultiplierSetter(tokenMultiplierSetter, _tokenMultiplierSetter); - tokenMultiplierSetter = _tokenMultiplierSetter; + /// @inheritdoc IChainAdmin + function removeRestriction(address _restriction) external onlySelf { + if (!activeRestrictions.remove(_restriction)) { + revert RestrictionWasNotPresent(_restriction); + } + emit RestrictionRemoved(_restriction); } /// @notice Set the expected upgrade timestamp for a specific protocol version. /// @param _protocolVersion The ZKsync chain protocol version. /// @param _upgradeTimestamp The timestamp at which the chain node should expect the upgrade to happen. 
- function setUpgradeTimestamp(uint256 _protocolVersion, uint256 _upgradeTimestamp) external onlyOwner { + function setUpgradeTimestamp(uint256 _protocolVersion, uint256 _upgradeTimestamp) external onlySelf { protocolVersionToUpgradeTimestamp[_protocolVersion] = _upgradeTimestamp; emit UpdateUpgradeTimestamp(_protocolVersion, _upgradeTimestamp); } @@ -49,9 +80,16 @@ contract ChainAdmin is IChainAdmin, Ownable2Step { /// @param _calls Array of Call structures defining target, value, and data for each call. /// @param _requireSuccess If true, reverts transaction on any call failure. /// @dev Intended for batch processing of contract interactions, managing gas efficiency and atomicity of operations. - function multicall(Call[] calldata _calls, bool _requireSuccess) external payable onlyOwner { - require(_calls.length > 0, "No calls provided"); + /// @dev Note, that this function lacks access control. It is expected that the access control is implemented in a separate restriction contract. + /// @dev Even though all the validation from external modules is executed via `staticcall`, the function + /// is marked as `nonReentrant` to prevent reentrancy attacks in case the staticcall restriction is lifted in the future. + function multicall(Call[] calldata _calls, bool _requireSuccess) external payable nonReentrant { + if (_calls.length == 0) { + revert NoCallsProvided(); + } for (uint256 i = 0; i < _calls.length; ++i) { + _validateCall(_calls[i]); + // slither-disable-next-line arbitrary-send-eth (bool success, bytes memory returnData) = _calls[i].target.call{value: _calls[i].value}(_calls[i].data); if (_requireSuccess && !success) { @@ -64,15 +102,27 @@ contract ChainAdmin is IChainAdmin, Ownable2Step { } } - /// @notice Sets the token multiplier in the specified Chain contract. - /// @param _chainContract The chain contract address where the token multiplier will be set. - /// @param _nominator The numerator part of the token multiplier. 
- /// @param _denominator The denominator part of the token multiplier. - function setTokenMultiplier(IAdmin _chainContract, uint128 _nominator, uint128 _denominator) external { - require(msg.sender == tokenMultiplierSetter, "Only the token multiplier setter can call this function"); - _chainContract.setTokenMultiplier(_nominator, _denominator); - } - /// @dev Contract might receive/hold ETH as part of the maintenance process. receive() external payable {} + + /// @notice Validates that the caller is allowed to perform the given call. + /// @dev Queries every active restriction; a restriction reverts if the call is not allowed. + function _validateCall(Call calldata _call) internal view { + address[] memory restrictions = getRestrictions(); + + unchecked { + for (uint256 i = 0; i < restrictions.length; ++i) { + IRestriction(restrictions[i]).validateCall(_call, msg.sender); + } + } + } + + /// @notice Adds a new restriction to the active restrictions set. + /// @param _restriction The address of the restriction contract to be added. + function _addRestriction(address _restriction) internal { + if (!activeRestrictions.add(_restriction)) { + revert RestrictionWasAlreadyPresent(_restriction); + } + emit RestrictionAdded(_restriction); + } } diff --git a/l1-contracts/contracts/governance/Common.sol b/l1-contracts/contracts/governance/Common.sol new file mode 100644 index 000000000..fd73dd793 --- /dev/null +++ b/l1-contracts/contracts/governance/Common.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @dev Represents a call to be made during multicall. +/// @param target The address to which the call will be made. +/// @param value The amount of Ether (in wei) to be sent along with the call. +/// @param data The calldata to be executed on the `target` address.
+struct Call { + address target; + uint256 value; + bytes data; +} diff --git a/l1-contracts/contracts/governance/Governance.sol b/l1-contracts/contracts/governance/Governance.sol index 42ac46e02..7b2182e1c 100644 --- a/l1-contracts/contracts/governance/Governance.sol +++ b/l1-contracts/contracts/governance/Governance.sol @@ -2,8 +2,10 @@ pragma solidity 0.8.24; -import {Ownable2Step} from "@openzeppelin/contracts/access/Ownable2Step.sol"; +import {Ownable2Step} from "@openzeppelin/contracts-v4/access/Ownable2Step.sol"; import {IGovernance} from "./IGovernance.sol"; +import {Call} from "./Common.sol"; +import {ZeroAddress, Unauthorized, OperationMustBeReady, OperationMustBePending, OperationExists, InvalidDelay, PreviousOperationNotExecuted} from "../common/L1ContractErrors.sol"; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev @@ -11,7 +13,7 @@ import {IGovernance} from "./IGovernance.sol"; /// @notice This contract manages operations (calls with preconditions) for governance tasks. /// The contract allows for operations to be scheduled, executed, and canceled with /// appropriate permissions and delays. It is used for managing and coordinating upgrades -/// and changes in all zkSync hyperchain governed contracts. +/// and changes in all ZK chain governed contracts. /// /// Operations can be proposed as either fully transparent upgrades with on-chain data, /// or "shadow" upgrades where upgrade data is not published on-chain before execution. Proposed operations @@ -38,8 +40,11 @@ contract Governance is IGovernance, Ownable2Step { /// @param _admin The address to be assigned as the admin of the contract. /// @param _securityCouncil The address to be assigned as the security council of the contract. /// @param _minDelay The initial minimum delay (in seconds) to be set for operations. 
+ /// @dev We allow for a zero address for _securityCouncil because it can be set later constructor(address _admin, address _securityCouncil, uint256 _minDelay) { - require(_admin != address(0), "Admin should be non zero address"); + if (_admin == address(0)) { + revert ZeroAddress(); + } _transferOwnership(_admin); @@ -56,22 +61,25 @@ contract Governance is IGovernance, Ownable2Step { /// @notice Checks that the message sender is contract itself. modifier onlySelf() { - require(msg.sender == address(this), "Only governance contract itself is allowed to call this function"); + if (msg.sender != address(this)) { + revert Unauthorized(msg.sender); + } _; } /// @notice Checks that the message sender is an active security council. modifier onlySecurityCouncil() { - require(msg.sender == securityCouncil, "Only security council is allowed to call this function"); + if (msg.sender != securityCouncil) { + revert Unauthorized(msg.sender); + } _; } /// @notice Checks that the message sender is an active owner or an active security council. modifier onlyOwnerOrSecurityCouncil() { - require( - msg.sender == owner() || msg.sender == securityCouncil, - "Only the owner and security council are allowed to call this function" - ); + if (msg.sender != owner() && msg.sender != securityCouncil) { + revert Unauthorized(msg.sender); + } _; } @@ -152,7 +160,9 @@ contract Governance is IGovernance, Ownable2Step { /// @dev Only owner can call this function. /// @param _id Proposal id value (see `hashOperation`) function cancel(bytes32 _id) external onlyOwner { - require(isOperationPending(_id), "Operation must be pending"); + if (!isOperationPending(_id)) { + revert OperationMustBePending(); + } delete timestamps[_id]; emit OperationCancelled(_id); } @@ -170,13 +180,17 @@ contract Governance is IGovernance, Ownable2Step { // Check if the predecessor operation is completed. _checkPredecessorDone(_operation.predecessor); // Ensure that the operation is ready to proceed. 
- require(isOperationReady(id), "Operation must be ready before execution"); + if (!isOperationReady(id)) { + revert OperationMustBeReady(); + } // Execute operation. // slither-disable-next-line reentrancy-eth _execute(_operation.calls); // Reconfirming that the operation is still ready after execution. // This is needed to avoid unexpected reentrancy attacks of re-executing the same operation. - require(isOperationReady(id), "Operation must be ready after execution"); + if (!isOperationReady(id)) { + revert OperationMustBeReady(); + } // Set operation to be done timestamps[id] = EXECUTED_PROPOSAL_TIMESTAMP; emit OperationExecuted(id); @@ -191,13 +205,17 @@ contract Governance is IGovernance, Ownable2Step { // Check if the predecessor operation is completed. _checkPredecessorDone(_operation.predecessor); // Ensure that the operation is in a pending state before proceeding. - require(isOperationPending(id), "Operation must be pending before execution"); + if (!isOperationPending(id)) { + revert OperationMustBePending(); + } // Execute operation. // slither-disable-next-line reentrancy-eth _execute(_operation.calls); // Reconfirming that the operation is still pending before execution. // This is needed to avoid unexpected reentrancy attacks of re-executing the same operation. - require(isOperationPending(id), "Operation must be pending after execution"); + if (!isOperationPending(id)) { + revert OperationMustBePending(); + } // Set operation to be done timestamps[id] = EXECUTED_PROPOSAL_TIMESTAMP; emit OperationExecuted(id); @@ -217,8 +235,12 @@ contract Governance is IGovernance, Ownable2Step { /// @param _id The operation hash (see `hashOperation` function) /// @param _delay The delay time (in seconds) after which the proposed upgrade can be executed by the owner. 
function _schedule(bytes32 _id, uint256 _delay) internal { - require(!isOperation(_id), "Operation with this proposal id already exists"); - require(_delay >= minDelay, "Proposed delay is less than minimum delay"); + if (isOperation(_id)) { + revert OperationExists(); + } + if (_delay < minDelay) { + revert InvalidDelay(); + } timestamps[_id] = block.timestamp + _delay; } @@ -226,6 +248,8 @@ contract Governance is IGovernance, Ownable2Step { /// @dev Execute an operation's calls. /// @param _calls The array of calls to be executed. function _execute(Call[] calldata _calls) internal { + // We disable this check because calldata array length is cheap. + // solhint-disable-next-line gas-length-in-loops for (uint256 i = 0; i < _calls.length; ++i) { // slither-disable-next-line arbitrary-send-eth (bool success, bytes memory returnData) = _calls[i].target.call{value: _calls[i].value}(_calls[i].data); @@ -242,7 +266,9 @@ contract Governance is IGovernance, Ownable2Step { /// @param _predecessorId The hash of the operation that should be completed. /// @dev Doesn't check the operation to be complete if the input is zero. 
function _checkPredecessorDone(bytes32 _predecessorId) internal view { - require(_predecessorId == bytes32(0) || isOperationDone(_predecessorId), "Predecessor operation not completed"); + if (_predecessorId != bytes32(0) && !isOperationDone(_predecessorId)) { + revert PreviousOperationNotExecuted(); + } } /*////////////////////////////////////////////////////////////// diff --git a/l1-contracts/contracts/governance/IAccessControlRestriction.sol b/l1-contracts/contracts/governance/IAccessControlRestriction.sol new file mode 100644 index 000000000..3c9cfb5c5 --- /dev/null +++ b/l1-contracts/contracts/governance/IAccessControlRestriction.sol @@ -0,0 +1,14 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @title AccessControlRestriction contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IAccessControlRestriction { + /// @notice Emitted when the required role for a specific function is set. + event RoleSet(address indexed target, bytes4 indexed selector, bytes32 requiredRole); + + /// @notice Emitted when the required role for a fallback function is set. + event FallbackRoleSet(address indexed target, bytes32 requiredRole); +} diff --git a/l1-contracts/contracts/governance/IChainAdmin.sol b/l1-contracts/contracts/governance/IChainAdmin.sol index d5d8f117c..6dba4dfa8 100644 --- a/l1-contracts/contracts/governance/IChainAdmin.sol +++ b/l1-contracts/contracts/governance/IChainAdmin.sol @@ -2,36 +2,46 @@ pragma solidity 0.8.24; -import {IAdmin} from "../state-transition/chain-interfaces/IAdmin.sol"; +import {Call} from "./Common.sol"; /// @title ChainAdmin contract interface /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev interface IChainAdmin { - /// @dev Represents a call to be made during multicall. - /// @param target The address to which the call will be made. - /// @param value The amount of Ether (in wei) to be sent along with the call. 
- /// @param data The calldata to be executed on the `target` address. - struct Call { - address target; - uint256 value; - bytes data; - } - /// @notice Emitted when the expected upgrade timestamp for a specific protocol version is set. - event UpdateUpgradeTimestamp(uint256 indexed _protocolVersion, uint256 _upgradeTimestamp); + event UpdateUpgradeTimestamp(uint256 indexed protocolVersion, uint256 upgradeTimestamp); /// @notice Emitted when the call is executed from the contract. - event CallExecuted(Call _call, bool _success, bytes _returnData); + event CallExecuted(Call call, bool success, bytes returnData); - /// @notice Emitted when the new token multiplier address is set. - event NewTokenMultiplierSetter(address _oldTokenMultiplierSetter, address _newTokenMultiplierSetter); + /// @notice Emitted when a new restriction is added. + event RestrictionAdded(address indexed restriction); - function setTokenMultiplierSetter(address _tokenMultiplierSetter) external; + /// @notice Emitted when a restriction is removed. + event RestrictionRemoved(address indexed restriction); - function setUpgradeTimestamp(uint256 _protocolVersion, uint256 _upgradeTimestamp) external; + /// @notice Returns the list of active restrictions. + function getRestrictions() external view returns (address[] memory); - function multicall(Call[] calldata _calls, bool _requireSuccess) external payable; + /// @notice Checks if the restriction is active. + /// @param _restriction The address of the restriction contract. + function isRestrictionActive(address _restriction) external view returns (bool); + + /// @notice Adds a new restriction to the active restrictions set. + /// @param _restriction The address of the restriction contract. + function addRestriction(address _restriction) external; - function setTokenMultiplier(IAdmin _chainContract, uint128 _nominator, uint128 _denominator) external; + /// @notice Removes a restriction from the active restrictions set. 
+ /// @param _restriction The address of the restriction contract. + /// @dev Sometimes restrictions might need to enforce their permanence (e.g. if a chain should be a rollup forever). + function removeRestriction(address _restriction) external; + + /// @notice Execute multiple calls as part of contract administration. + /// @param _calls Array of Call structures defining target, value, and data for each call. + /// @param _requireSuccess If true, reverts transaction on any call failure. + /// @dev Intended for batch processing of contract interactions, managing gas efficiency and atomicity of operations. + /// @dev Note, that this function lacks access control. It is expected that the access control is implemented in a separate restriction contract. + /// @dev Even though all the validation from external modules is executed via `staticcall`, the function + /// is marked as `nonReentrant` to prevent reentrancy attacks in case the staticcall restriction is lifted in the future. + function multicall(Call[] calldata _calls, bool _requireSuccess) external payable; } diff --git a/l1-contracts/contracts/governance/IGovernance.sol b/l1-contracts/contracts/governance/IGovernance.sol index 2b0228203..0cb478573 100644 --- a/l1-contracts/contracts/governance/IGovernance.sol +++ b/l1-contracts/contracts/governance/IGovernance.sol @@ -1,6 +1,8 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; +import {Call} from "./Common.sol"; /// @title Governance contract interface /// @author Matter Labs @@ -18,16 +20,6 @@ interface IGovernance { Done } - /// @dev Represents a call to be made during an operation. - /// @param target The address to which the call will be made. - /// @param value The amount of Ether (in wei) to be sent along with the call. 
- /// @param data The calldata to be executed on the `target` address. - struct Call { - address target; - uint256 value; - bytes data; - } - /// @dev Defines the structure of an operation that Governance executes. /// @param calls An array of `Call` structs, each representing a call to be made during the operation. /// @param predecessor The hash of the predecessor operation, that should be executed before this operation. diff --git a/l1-contracts/contracts/governance/IPermanentRestriction.sol b/l1-contracts/contracts/governance/IPermanentRestriction.sol new file mode 100644 index 000000000..5fb015e33 --- /dev/null +++ b/l1-contracts/contracts/governance/IPermanentRestriction.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @notice The interface for the permanent restriction contract. +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IPermanentRestriction { + /// @notice Emitted when the implementation is allowed or disallowed. + event AdminImplementationAllowed(bytes32 indexed implementationHash, bool isAllowed); + + /// @notice Emitted when a certain calldata is allowed or disallowed. + event AllowedDataChanged(bytes data, bool isAllowed); + + /// @notice Emitted when the selector is labeled as validated or not. + event SelectorValidationChanged(bytes4 indexed selector, bool isValidated); + + /// @notice Emitted when the L2 admin is whitelisted or not. 
+ event AllowL2Admin(address indexed adminAddress); +} diff --git a/l1-contracts/contracts/governance/IRestriction.sol b/l1-contracts/contracts/governance/IRestriction.sol new file mode 100644 index 000000000..b2cc79428 --- /dev/null +++ b/l1-contracts/contracts/governance/IRestriction.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Call} from "./Common.sol"; + +/// @title Restriction contract interface +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IRestriction { + /// @notice Ensures that the invoker has the required role to call the function. + /// @param _call The call data. + /// @param _invoker The address of the invoker. + function validateCall(Call calldata _call, address _invoker) external view; +} diff --git a/l1-contracts/contracts/governance/L2AdminFactory.sol b/l1-contracts/contracts/governance/L2AdminFactory.sol new file mode 100644 index 000000000..d4fe4637c --- /dev/null +++ b/l1-contracts/contracts/governance/L2AdminFactory.sol @@ -0,0 +1,42 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {ChainAdmin} from "./ChainAdmin.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev Contract used to deploy ChainAdmin contracts on L2. +/// @dev It can be used to ensure that certain L2 admins are deployed with +/// predefined restrictions. E.g. it can be used to deploy admins that ensure that +/// a chain is a permanent rollup. +/// @dev This contract is expected to be deployed in zkEVM (L2) environment. +/// @dev The contract is immutable, in case the restrictions need to be changed, +/// a new contract should be deployed. +contract L2AdminFactory { + event AdminDeployed(address admin); + + /// @dev We use storage instead of immutable variables due to the + /// specifics of the zkEVM environment, where storage is actually cheaper. 
+ address[] public requiredRestrictions; + + constructor(address[] memory _requiredRestrictions) { + requiredRestrictions = _requiredRestrictions; + } + + /// @notice Deploys a new L2 admin contract. + /// @return admin The address of the deployed admin contract. + function deployAdmin(address[] calldata _additionalRestrictions, bytes32 _salt) external returns (address admin) { + address[] memory restrictions = new address[](requiredRestrictions.length + _additionalRestrictions.length); + uint256 cachedRequired = requiredRestrictions.length; + for (uint256 i = 0; i < cachedRequired; ++i) { + restrictions[i] = requiredRestrictions[i]; + } + uint256 cachedAdditional = _additionalRestrictions.length; + for (uint256 i = 0; i < cachedAdditional; ++i) { + restrictions[requiredRestrictions.length + i] = _additionalRestrictions[i]; + } + + admin = address(new ChainAdmin{salt: _salt}(restrictions)); + } +} diff --git a/l1-contracts/contracts/governance/L2ProxyAdminDeployer.sol b/l1-contracts/contracts/governance/L2ProxyAdminDeployer.sol new file mode 100644 index 000000000..144f951bf --- /dev/null +++ b/l1-contracts/contracts/governance/L2ProxyAdminDeployer.sol @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-length-in-loops + +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice The contract that deterministically deploys a ProxyAdmin, while +/// ensuring that its owner is the aliased governance contract +contract L2ProxyAdminDeployer { + address public immutable PROXY_ADMIN_ADDRESS; + + constructor(address _aliasedGovernance) { + ProxyAdmin admin = new ProxyAdmin{salt: bytes32(0)}(); + admin.transferOwnership(_aliasedGovernance); + + PROXY_ADMIN_ADDRESS = address(admin); + } +} diff --git a/l1-contracts/contracts/governance/PermanentRestriction.sol 
b/l1-contracts/contracts/governance/PermanentRestriction.sol new file mode 100644 index 000000000..153ce369e --- /dev/null +++ b/l1-contracts/contracts/governance/PermanentRestriction.sol @@ -0,0 +1,311 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {UnsupportedEncodingVersion, CallNotAllowed, ChainZeroAddress, NotAHyperchain, NotAnAdmin, RemovingPermanentRestriction, ZeroAddress, UnallowedImplementation, AlreadyWhitelisted, NotAllowed, NotBridgehub, InvalidSelector, InvalidAddress, NotEnoughGas} from "../common/L1ContractErrors.sol"; + +import {L2TransactionRequestTwoBridgesOuter, BridgehubBurnCTMAssetData} from "../bridgehub/IBridgehub.sol"; +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {L2ContractHelper} from "../common/libraries/L2ContractHelper.sol"; +import {NEW_ENCODING_VERSION} from "../bridge/asset-router/IAssetRouterBase.sol"; + +import {Call} from "./Common.sol"; +import {IRestriction} from "./IRestriction.sol"; +import {IChainAdmin} from "./IChainAdmin.sol"; +import {IBridgehub} from "../bridgehub/IBridgehub.sol"; +import {IZKChain} from "../state-transition/chain-interfaces/IZKChain.sol"; +import {IGetters} from "../state-transition/chain-interfaces/IGetters.sol"; +import {IAdmin} from "../state-transition/chain-interfaces/IAdmin.sol"; + +import {IPermanentRestriction} from "./IPermanentRestriction.sol"; + +/// @dev We use try-catch to test whether some of the conditions should be checked. +/// To avoid attacks based on the 63/64 gas limitations, we ensure that each such call +/// has at least this amount. +uint256 constant MIN_GAS_FOR_FALLABLE_CALL = 5_000_000; + +/// @title PermanentRestriction contract +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice This contract should be used by chains that wish to guarantee that certain security +/// properties are preserved forever. 
+/// @dev To be deployed as a transparent upgradable proxy, owned by a trusted decentralized governance. +/// @dev One of the instances of such a contract is to ensure that a ZkSyncHyperchain is a rollup forever. +contract PermanentRestriction is IRestriction, IPermanentRestriction, Ownable2StepUpgradeable { + /// @notice The address of the Bridgehub contract. + IBridgehub public immutable BRIDGE_HUB; + + /// @notice The address of the L2 admin factory that should be used to deploy the chain admins + /// for chains that migrated on top of an L2 settlement layer. + /// @dev If this contract is deployed on L2, this address is 0. + /// @dev This address is expected to be the same on all L2 chains. + address public immutable L2_ADMIN_FACTORY; + + /// @notice The mapping of the allowed admin implementations. + mapping(bytes32 implementationCodeHash => bool isAllowed) public allowedAdminImplementations; + + /// @notice The mapping of the allowed calls. + mapping(bytes allowedCalldata => bool isAllowed) public allowedCalls; + + /// @notice The mapping of the validated selectors. + mapping(bytes4 selector => bool isValidated) public validatedSelectors; + + /// @notice The mapping of whitelisted L2 admins. + mapping(address adminAddress => bool isWhitelisted) public allowedL2Admins; + + constructor(IBridgehub _bridgehub, address _l2AdminFactory) { + BRIDGE_HUB = _bridgehub; + L2_ADMIN_FACTORY = _l2AdminFactory; + } + + function initialize(address _initialOwner) external initializer { + // solhint-disable-next-line gas-custom-errors, reason-string + if (_initialOwner == address(0)) { + revert ZeroAddress(); + } + _transferOwnership(_initialOwner); + } + + /// @notice Allows a certain `ChainAdmin` implementation to be used as an admin. + /// @param _implementationHash The hash of the implementation code. + /// @param _isAllowed The flag that indicates if the implementation is allowed. 
+ function allowAdminImplementation(bytes32 _implementationHash, bool _isAllowed) external onlyOwner { + allowedAdminImplementations[_implementationHash] = _isAllowed; + + emit AdminImplementationAllowed(_implementationHash, _isAllowed); + } + + /// @notice Allows a certain calldata for a selector to be used. + /// @param _data The calldata for the function. + /// @param _isAllowed The flag that indicates if the calldata is allowed. + function setAllowedData(bytes calldata _data, bool _isAllowed) external onlyOwner { + allowedCalls[_data] = _isAllowed; + + emit AllowedDataChanged(_data, _isAllowed); + } + + /// @notice Allows a certain selector to be validated. + /// @param _selector The selector of the function. + /// @param _isValidated The flag that indicates if the selector is validated. + function setSelectorIsValidated(bytes4 _selector, bool _isValidated) external onlyOwner { + validatedSelectors[_selector] = _isValidated; + + emit SelectorValidationChanged(_selector, _isValidated); + } + + /// @notice Whitelists a certain L2 admin. + /// @param deploymentSalt The salt for the deployment. + /// @param l2BytecodeHash The hash of the L2 bytecode. + /// @param constructorInputHash The hash of the constructor data for the deployment. + function allowL2Admin(bytes32 deploymentSalt, bytes32 l2BytecodeHash, bytes32 constructorInputHash) external { + // We do not do any additional validations for constructor data or the bytecode, + // we expect that only admins of the allowed format are to be deployed. 
+ address expectedAddress = L2ContractHelper.computeCreate2Address( + L2_ADMIN_FACTORY, + deploymentSalt, + l2BytecodeHash, + constructorInputHash + ); + + if (allowedL2Admins[expectedAddress]) { + revert AlreadyWhitelisted(expectedAddress); + } + + allowedL2Admins[expectedAddress] = true; + emit AllowL2Admin(expectedAddress); + } + + /// @inheritdoc IRestriction + function validateCall( + Call calldata _call, + address // _invoker + ) external view override { + _validateAsChainAdmin(_call); + _validateMigrationToL2(_call); + _validateRemoveRestriction(_call); + } + + /// @notice Validates the migration to an L2 settlement layer. + /// @param _call The call data. + /// @dev Note that we do not need to validate the migration to the L1 layer as the admin + /// is not changed in this case. + function _validateMigrationToL2(Call calldata _call) internal view { + _ensureEnoughGas(); + try this.tryGetNewAdminFromMigration(_call) returns (address admin) { + if (!allowedL2Admins[admin]) { + revert NotAllowed(admin); + } + } catch { + // It was not the migration call, so we do nothing + } + } + + /// @notice Validates the call as the chain admin + /// @param _call The call data. + function _validateAsChainAdmin(Call calldata _call) internal view { + if (!_isAdminOfAChain(_call.target)) { + // We only validate calls related to being an admin of a chain + return; + } + + // All calls with the length of the data below 4 will get into `receive`/`fallback` functions, + // we consider it to always be allowed. + if (_call.data.length < 4) { + return; + } + + bytes4 selector = bytes4(_call.data[:4]); + + if (selector == IAdmin.setPendingAdmin.selector) { + _validateNewAdmin(_call); + return; + } + + if (!validatedSelectors[selector]) { + // The selector is not validated, any data is allowed. + return; + } + + if (!allowedCalls[_call.data]) { + revert CallNotAllowed(_call.data); + } + } + + /// @notice Validates the correctness of the new admin. + /// @param _call The call data. 
+ /// @dev Ensures that the admin has a whitelisted implementation and does not remove this restriction. + function _validateNewAdmin(Call calldata _call) internal view { + address newChainAdmin = abi.decode(_call.data[4:], (address)); + + bytes32 implementationCodeHash = newChainAdmin.codehash; + + if (!allowedAdminImplementations[implementationCodeHash]) { + revert UnallowedImplementation(implementationCodeHash); + } + + // Since the implementation is known to be correct (from the checks above), we + // can safely trust the returned value from the call below + if (!IChainAdmin(newChainAdmin).isRestrictionActive(address(this))) { + revert RemovingPermanentRestriction(); + } + } + + /// @notice Validates the removal of the restriction. + /// @param _call The call data. + /// @dev Ensures that this restriction is not removed. + function _validateRemoveRestriction(Call calldata _call) internal view { + if (_call.target != msg.sender) { + return; + } + + if (bytes4(_call.data[:4]) != IChainAdmin.removeRestriction.selector) { + return; + } + + address removedRestriction = abi.decode(_call.data[4:], (address)); + + if (removedRestriction == address(this)) { + revert RemovingPermanentRestriction(); + } + } + + /// @notice Checks if the `msg.sender` is an admin of a certain ZkSyncHyperchain. + /// @param _chain The address of the chain. + function _isAdminOfAChain(address _chain) internal view returns (bool) { + _ensureEnoughGas(); + (bool success, ) = address(this).staticcall(abi.encodeCall(this.tryCompareAdminOfAChain, (_chain, msg.sender))); + return success; + } + + /// @notice Tries to compare the admin of a chain with the potential admin. + /// @param _chain The address of the chain. + /// @param _potentialAdmin The address of the potential admin. + /// @dev This function reverts if the `_chain` is not a ZkSyncHyperchain or the `_potentialAdmin` is not the + /// admin of the chain. 
+ function tryCompareAdminOfAChain(address _chain, address _potentialAdmin) external view { + if (_chain == address(0)) { + revert ChainZeroAddress(); + } + + // Unfortunately there is no easy way to double check that indeed the `_chain` is a ZkSyncHyperchain. + // So we do the following: + // - Query it for `chainId`. If it reverts, it is not a ZkSyncHyperchain. + // - Query the Bridgehub for the Hyperchain with the given `chainId`. + // - We compare the corresponding addresses + + // Note, that we do not use an explicit call here to ensure that the function does not panic in case of + // incorrect `_chain` address. + (bool success, bytes memory data) = _chain.staticcall(abi.encodeWithSelector(IGetters.getChainId.selector)); + if (!success || data.length < 32) { + revert NotAHyperchain(_chain); + } + + // Can not fail + uint256 chainId = abi.decode(data, (uint256)); + + // Note, that here it is important to use the legacy `getHyperchain` function, so that the contract + // is compatible with the legacy ones. + if (BRIDGE_HUB.getHyperchain(chainId) != _chain) { + revert NotAHyperchain(_chain); + } + + // Now, the chain is known to be a hyperchain, so it should implement the corresponding interface + address admin = IZKChain(_chain).getAdmin(); + if (admin != _potentialAdmin) { + revert NotAnAdmin(admin, _potentialAdmin); + } + } + + /// @notice Tries to get the new admin from the migration. + /// @param _call The call data. + /// @dev This function reverts if the provided call was not a migration call. 
+ function tryGetNewAdminFromMigration(Call calldata _call) external view returns (address) { + if (_call.target != address(BRIDGE_HUB)) { + revert NotBridgehub(_call.target); + } + + if (bytes4(_call.data[:4]) != IBridgehub.requestL2TransactionTwoBridges.selector) { + revert InvalidSelector(bytes4(_call.data[:4])); + } + + address sharedBridge = BRIDGE_HUB.sharedBridge(); + + L2TransactionRequestTwoBridgesOuter memory request = abi.decode( + _call.data[4:], + (L2TransactionRequestTwoBridgesOuter) + ); + + if (request.secondBridgeAddress != sharedBridge) { + revert InvalidAddress(sharedBridge, request.secondBridgeAddress); + } + + bytes memory secondBridgeData = request.secondBridgeCalldata; + if (secondBridgeData[0] != NEW_ENCODING_VERSION) { + revert UnsupportedEncodingVersion(); + } + bytes memory encodedData = new bytes(secondBridgeData.length - 1); + assembly { + mcopy(add(encodedData, 0x20), add(secondBridgeData, 0x21), mload(encodedData)) + } + + (bytes32 chainAssetId, bytes memory bridgehubData) = abi.decode(encodedData, (bytes32, bytes)); + // We will just check that the chainAssetId is a valid chainAssetId. + // For now, for simplicity, we do not check that the admin is exactly the admin + // of this chain. 
+ address ctmAddress = BRIDGE_HUB.ctmAssetIdToAddress(chainAssetId); + if (ctmAddress == address(0)) { + revert ZeroAddress(); + } + + BridgehubBurnCTMAssetData memory burnData = abi.decode(bridgehubData, (BridgehubBurnCTMAssetData)); + (address l2Admin, ) = abi.decode(burnData.ctmData, (address, bytes)); + + return l2Admin; + } + + function _ensureEnoughGas() internal view { + if (gasleft() < MIN_GAS_FOR_FALLABLE_CALL) { + revert NotEnoughGas(); + } + } +} diff --git a/l1-contracts/contracts/governance/TransitionaryOwner.sol b/l1-contracts/contracts/governance/TransitionaryOwner.sol new file mode 100644 index 000000000..9248204bf --- /dev/null +++ b/l1-contracts/contracts/governance/TransitionaryOwner.sol @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-length-in-loops + +import {Ownable2Step} from "@openzeppelin/contracts-v4/access/Ownable2Step.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice The contract that is used as a temporary owner for Ownable2Step contracts until the +/// governance can accept the ownership +contract TransitionaryOwner { + address public immutable GOVERNANCE_ADDRESS; + + constructor(address _governanceAddress) { + GOVERNANCE_ADDRESS = _governanceAddress; + } + + /// @notice Claims the ownership of a contract and transfers it to the governance + function claimOwnershipAndGiveToGovernance(address target) external { + Ownable2Step(target).acceptOwnership(); + Ownable2Step(target).transferOwnership(GOVERNANCE_ADDRESS); + } +} diff --git a/l1-contracts/contracts/state-transition/ChainTypeManager.sol b/l1-contracts/contracts/state-transition/ChainTypeManager.sol new file mode 100644 index 000000000..2760e36c3 --- /dev/null +++ b/l1-contracts/contracts/state-transition/ChainTypeManager.sol @@ -0,0 +1,518 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-custom-errors, reason-string + +import 
{EnumerableMap} from "@openzeppelin/contracts-v4/utils/structs/EnumerableMap.sol"; +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; + +import {Diamond} from "./libraries/Diamond.sol"; +import {DiamondProxy} from "./chain-deps/DiamondProxy.sol"; +import {IAdmin} from "./chain-interfaces/IAdmin.sol"; +import {IDiamondInit} from "./chain-interfaces/IDiamondInit.sol"; +import {IExecutor} from "./chain-interfaces/IExecutor.sol"; +import {IChainTypeManager, ChainTypeManagerInitializeData, ChainCreationParams} from "./IChainTypeManager.sol"; +import {IZKChain} from "./chain-interfaces/IZKChain.sol"; +import {FeeParams} from "./chain-deps/ZKChainStorage.sol"; +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; +import {L2_TO_L1_LOG_SERIALIZE_SIZE, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK} from "../common/Config.sol"; +import {Unauthorized, ZeroAddress, HashMismatch, GenesisUpgradeZero, GenesisBatchHashZero, GenesisIndexStorageZero, GenesisBatchCommitmentZero} from "../common/L1ContractErrors.sol"; +import {SemVer} from "../common/libraries/SemVer.sol"; +import {IBridgehub} from "../bridgehub/IBridgehub.sol"; + +/// @title State Transition Manager contract +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +contract ChainTypeManager is IChainTypeManager, ReentrancyGuard, Ownable2StepUpgradeable { + using EnumerableMap for EnumerableMap.UintToAddressMap; + + /// @notice Address of the bridgehub + address public immutable BRIDGE_HUB; + + /// @notice The map from chainId => zkChain contract + EnumerableMap.UintToAddressMap internal __DEPRECATED_zkChainMap; + + /// @dev The batch zero hash, calculated at initialization + bytes32 public storedBatchZero; + + /// @dev The stored cutData for diamond cut + bytes32 public initialCutHash; + + /// @dev The l1GenesisUpgrade contract address, 
used to set chainId + address public l1GenesisUpgrade; + + /// @dev The current packed protocolVersion. To access human-readable version, use `getSemverProtocolVersion` function. + uint256 public protocolVersion; + + /// @dev The timestamp when protocolVersion can be last used + mapping(uint256 _protocolVersion => uint256) public protocolVersionDeadline; + + /// @dev The validatorTimelock contract address + address public validatorTimelock; + + /// @dev The stored cutData for upgrade diamond cut. protocolVersion => cutHash + mapping(uint256 protocolVersion => bytes32 cutHash) public upgradeCutHash; + + /// @dev The address used to manage non critical updates + address public admin; + + /// @dev The address to accept the admin role + address private pendingAdmin; + + /// @dev The initial force deployment hash + bytes32 public initialForceDeploymentHash; + + /// @dev Contract is expected to be used as proxy implementation. + /// @dev Initialize the implementation to prevent Parity hack. + constructor(address _bridgehub) reentrancyGuardInitializer { + BRIDGE_HUB = _bridgehub; + + // While this does not provide a protection in the production, it is needed for local testing + // Length of the L2Log encoding should not be equal to the length of other L2Logs' tree nodes preimages + assert(L2_TO_L1_LOG_SERIALIZE_SIZE != 2 * 32); + } + + /// @notice only the bridgehub can call + modifier onlyBridgehub() { + if (msg.sender != BRIDGE_HUB) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice the admin can call, for non-critical updates + modifier onlyOwnerOrAdmin() { + if (msg.sender != admin && msg.sender != owner()) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @return The tuple of (major, minor, patch) protocol version. 
+ function getSemverProtocolVersion() external view returns (uint32, uint32, uint32) { + // slither-disable-next-line unused-return + return SemVer.unpackSemVer(SafeCast.toUint96(protocolVersion)); + } + + /// @notice return the chain contract address for a chainId + function getZKChain(uint256 _chainId) public view returns (address) { + return IBridgehub(BRIDGE_HUB).getZKChain(_chainId); + } + + /// @notice return the chain contract address for a chainId + /// @notice Do not use! use getZKChain instead. This will be removed. + function getZKChainLegacy(uint256 _chainId) public view returns (address chainAddress) { + // slither-disable-next-line unused-return + (, chainAddress) = __DEPRECATED_zkChainMap.tryGet(_chainId); + } + + /// @notice Returns the address of the ZK chain admin with the corresponding chainID. + /// @notice Not related to the CTM, but it is here for legacy reasons. + /// @param _chainId the chainId of the chain + function getChainAdmin(uint256 _chainId) external view override returns (address) { + return IZKChain(getZKChain(_chainId)).getAdmin(); + } + + /// @dev initialize + function initialize(ChainTypeManagerInitializeData calldata _initializeData) external reentrancyGuardInitializer { + if (_initializeData.owner == address(0)) { + revert ZeroAddress(); + } + _transferOwnership(_initializeData.owner); + + protocolVersion = _initializeData.protocolVersion; + protocolVersionDeadline[_initializeData.protocolVersion] = type(uint256).max; + validatorTimelock = _initializeData.validatorTimelock; + + _setChainCreationParams(_initializeData.chainCreationParams); + } + + /// @notice Updates the parameters with which a new chain is created + /// @param _chainCreationParams The new chain creation parameters + function _setChainCreationParams(ChainCreationParams calldata _chainCreationParams) internal { + if (_chainCreationParams.genesisUpgrade == address(0)) { + revert GenesisUpgradeZero(); + } + if (_chainCreationParams.genesisBatchHash == bytes32(0)) 
{ + revert GenesisBatchHashZero(); + } + if (_chainCreationParams.genesisIndexRepeatedStorageChanges == uint64(0)) { + revert GenesisIndexStorageZero(); + } + if (_chainCreationParams.genesisBatchCommitment == bytes32(0)) { + revert GenesisBatchCommitmentZero(); + } + + l1GenesisUpgrade = _chainCreationParams.genesisUpgrade; + + // We need to initialize the state hash because it is used in the commitment of the next batch + IExecutor.StoredBatchInfo memory batchZero = IExecutor.StoredBatchInfo({ + batchNumber: 0, + batchHash: _chainCreationParams.genesisBatchHash, + indexRepeatedStorageChanges: _chainCreationParams.genesisIndexRepeatedStorageChanges, + numberOfLayer1Txs: 0, + priorityOperationsHash: EMPTY_STRING_KECCAK, + l2LogsTreeRoot: DEFAULT_L2_LOGS_TREE_ROOT_HASH, + timestamp: 0, + commitment: _chainCreationParams.genesisBatchCommitment + }); + storedBatchZero = keccak256(abi.encode(batchZero)); + bytes32 newInitialCutHash = keccak256(abi.encode(_chainCreationParams.diamondCut)); + initialCutHash = newInitialCutHash; + bytes32 forceDeploymentHash = keccak256(abi.encode(_chainCreationParams.forceDeploymentsData)); + initialForceDeploymentHash = forceDeploymentHash; + + emit NewChainCreationParams({ + genesisUpgrade: _chainCreationParams.genesisUpgrade, + genesisBatchHash: _chainCreationParams.genesisBatchHash, + genesisIndexRepeatedStorageChanges: _chainCreationParams.genesisIndexRepeatedStorageChanges, + genesisBatchCommitment: _chainCreationParams.genesisBatchCommitment, + newInitialCutHash: newInitialCutHash, + forceDeploymentHash: forceDeploymentHash + }); + } + + /// @notice Updates the parameters with which a new chain is created + /// @param _chainCreationParams The new chain creation parameters + function setChainCreationParams(ChainCreationParams calldata _chainCreationParams) external onlyOwner { + _setChainCreationParams(_chainCreationParams); + } + + /// @notice Starts the transfer of admin rights. 
Only the current admin can propose a new pending one. + /// @notice New admin can accept admin rights by calling `acceptAdmin` function. + /// @param _newPendingAdmin Address of the new admin + /// @dev Please note, if the owner wants to enforce the admin change it must execute both `setPendingAdmin` and + /// `acceptAdmin` atomically. Otherwise `admin` can set different pending admin and so fail to accept the admin rights. + function setPendingAdmin(address _newPendingAdmin) external onlyOwnerOrAdmin { + // Save previous value into the stack to put it into the event later + address oldPendingAdmin = pendingAdmin; + // Change pending admin + pendingAdmin = _newPendingAdmin; + emit NewPendingAdmin(oldPendingAdmin, _newPendingAdmin); + } + + /// @notice Accepts transfer of admin rights. Only pending admin can accept the role. + function acceptAdmin() external { + address currentPendingAdmin = pendingAdmin; + // Only proposed by current admin address can claim the admin rights + if (msg.sender != currentPendingAdmin) { + revert Unauthorized(msg.sender); + } + + address previousAdmin = admin; + admin = currentPendingAdmin; + delete pendingAdmin; + + emit NewPendingAdmin(currentPendingAdmin, address(0)); + emit NewAdmin(previousAdmin, currentPendingAdmin); + } + + /// @dev set validatorTimelock. 
Cannot do it during initialization, as validatorTimelock is deployed after CTM + /// @param _validatorTimelock the new validatorTimelock address + function setValidatorTimelock(address _validatorTimelock) external onlyOwnerOrAdmin { + address oldValidatorTimelock = validatorTimelock; + validatorTimelock = _validatorTimelock; + emit NewValidatorTimelock(oldValidatorTimelock, _validatorTimelock); + } + + /// @dev set New Version with upgrade from old version + /// @param _cutData the new diamond cut data + /// @param _oldProtocolVersion the old protocol version + /// @param _oldProtocolVersionDeadline the deadline for the old protocol version + /// @param _newProtocolVersion the new protocol version + function setNewVersionUpgrade( + Diamond.DiamondCutData calldata _cutData, + uint256 _oldProtocolVersion, + uint256 _oldProtocolVersionDeadline, + uint256 _newProtocolVersion + ) external onlyOwner { + bytes32 newCutHash = keccak256(abi.encode(_cutData)); + uint256 previousProtocolVersion = protocolVersion; + upgradeCutHash[_oldProtocolVersion] = newCutHash; + protocolVersionDeadline[_oldProtocolVersion] = _oldProtocolVersionDeadline; + protocolVersionDeadline[_newProtocolVersion] = type(uint256).max; + protocolVersion = _newProtocolVersion; + emit NewProtocolVersion(previousProtocolVersion, _newProtocolVersion); + emit NewUpgradeCutHash(_oldProtocolVersion, newCutHash); + emit NewUpgradeCutData(_newProtocolVersion, _cutData); + } + + /// @dev check that the protocolVersion is active + /// @param _protocolVersion the protocol version to check + function protocolVersionIsActive(uint256 _protocolVersion) external view override returns (bool) { + return block.timestamp <= protocolVersionDeadline[_protocolVersion]; + } + + /// @dev set the protocol version timestamp + /// @param _protocolVersion the protocol version + /// @param _timestamp the timestamp is the deadline + function setProtocolVersionDeadline(uint256 _protocolVersion, uint256 _timestamp) external onlyOwner { + 
protocolVersionDeadline[_protocolVersion] = _timestamp; + } + + /// @dev set upgrade for some protocolVersion + /// @param _cutData the new diamond cut data + /// @param _oldProtocolVersion the old protocol version + function setUpgradeDiamondCut( + Diamond.DiamondCutData calldata _cutData, + uint256 _oldProtocolVersion + ) external onlyOwner { + bytes32 newCutHash = keccak256(abi.encode(_cutData)); + upgradeCutHash[_oldProtocolVersion] = newCutHash; + emit NewUpgradeCutHash(_oldProtocolVersion, newCutHash); + } + + /// @dev freezes the specified chain + /// @param _chainId the chainId of the chain + function freezeChain(uint256 _chainId) external onlyOwner { + IZKChain(getZKChain(_chainId)).freezeDiamond(); + } + + /// @dev freezes the specified chain + /// @param _chainId the chainId of the chain + function unfreezeChain(uint256 _chainId) external onlyOwner { + IZKChain(getZKChain(_chainId)).unfreezeDiamond(); + } + + /// @dev reverts batches on the specified chain + /// @param _chainId the chainId of the chain + /// @param _newLastBatch the new last batch + function revertBatches(uint256 _chainId, uint256 _newLastBatch) external onlyOwnerOrAdmin { + IZKChain(getZKChain(_chainId)).revertBatchesSharedBridge(_chainId, _newLastBatch); + } + + /// @dev execute predefined upgrade + /// @param _chainId the chainId of the chain + /// @param _oldProtocolVersion the old protocol version + /// @param _diamondCut the diamond cut data + function upgradeChainFromVersion( + uint256 _chainId, + uint256 _oldProtocolVersion, + Diamond.DiamondCutData calldata _diamondCut + ) external onlyOwner { + IZKChain(getZKChain(_chainId)).upgradeChainFromVersion(_oldProtocolVersion, _diamondCut); + } + + /// @dev executes upgrade on chain + /// @param _chainId the chainId of the chain + /// @param _diamondCut the diamond cut data + function executeUpgrade(uint256 _chainId, Diamond.DiamondCutData calldata _diamondCut) external onlyOwner { + 
IZKChain(getZKChain(_chainId)).executeUpgrade(_diamondCut); + } + + /// @dev setPriorityTxMaxGasLimit for the specified chain + /// @param _chainId the chainId of the chain + /// @param _maxGasLimit the new max gas limit + function setPriorityTxMaxGasLimit(uint256 _chainId, uint256 _maxGasLimit) external onlyOwner { + IZKChain(getZKChain(_chainId)).setPriorityTxMaxGasLimit(_maxGasLimit); + } + + /// @dev setTokenMultiplier for the specified chain + /// @param _chainId the chainId of the chain + /// @param _nominator the new nominator of the token multiplier + /// @param _denominator the new denominator of the token multiplier + function setTokenMultiplier(uint256 _chainId, uint128 _nominator, uint128 _denominator) external onlyOwner { + IZKChain(getZKChain(_chainId)).setTokenMultiplier(_nominator, _denominator); + } + + /// @dev changeFeeParams for the specified chain + /// @param _chainId the chainId of the chain + /// @param _newFeeParams the new fee params + function changeFeeParams(uint256 _chainId, FeeParams calldata _newFeeParams) external onlyOwner { + IZKChain(getZKChain(_chainId)).changeFeeParams(_newFeeParams); + } + + /// @dev setValidator for the specified chain + /// @param _chainId the chainId of the chain + /// @param _validator the new validator + /// @param _active whether the validator is active + function setValidator(uint256 _chainId, address _validator, bool _active) external onlyOwnerOrAdmin { + IZKChain(getZKChain(_chainId)).setValidator(_validator, _active); + } + + /// @dev setPorterAvailability for the specified chain + /// @param _chainId the chainId of the chain + /// @param _zkPorterIsAvailable whether the zkPorter mode is available + function setPorterAvailability(uint256 _chainId, bool _zkPorterIsAvailable) external onlyOwner { + IZKChain(getZKChain(_chainId)).setPorterAvailability(_zkPorterIsAvailable); + } + + /// registration + + /// @notice deploys a full set of chains contracts + /// @param _chainId the chain's id + /// @param 
_baseTokenAssetId the base token asset id used to pay for gas fees + /// @param _admin the chain's admin address + /// @param _diamondCut the diamond cut data that initializes the chains Diamond Proxy + function _deployNewChain( + uint256 _chainId, + bytes32 _baseTokenAssetId, + address _admin, + bytes memory _diamondCut + ) internal returns (address zkChainAddress) { + if (getZKChain(_chainId) != address(0)) { + // ZKChain already registered + return getZKChain(_chainId); + } + + // check not registered + Diamond.DiamondCutData memory diamondCut = abi.decode(_diamondCut, (Diamond.DiamondCutData)); + + { + // check input + bytes32 cutHashInput = keccak256(_diamondCut); + if (cutHashInput != initialCutHash) { + revert HashMismatch(initialCutHash, cutHashInput); + } + } + + // construct init data + bytes memory initData; + /// all together 4+9*32=292 bytes for the selector + mandatory data + // solhint-disable-next-line func-named-parameters + initData = bytes.concat( + IDiamondInit.initialize.selector, + bytes32(_chainId), + bytes32(uint256(uint160(BRIDGE_HUB))), + bytes32(uint256(uint160(address(this)))), + bytes32(protocolVersion), + bytes32(uint256(uint160(_admin))), + bytes32(uint256(uint160(validatorTimelock))), + _baseTokenAssetId, + storedBatchZero, + diamondCut.initCalldata + ); + + diamondCut.initCalldata = initData; + // deploy zkChainContract + // slither-disable-next-line reentrancy-no-eth + DiamondProxy zkChainContract = new DiamondProxy{salt: bytes32(0)}(block.chainid, diamondCut); + // save data + zkChainAddress = address(zkChainContract); + emit NewZKChain(_chainId, zkChainAddress); + } + + /// @notice called by Bridgehub when a chain registers + /// @param _chainId the chain's id + /// @param _baseTokenAssetId the base token asset id used to pay for gas fees + /// @param _admin the chain's admin address + /// @param _initData the diamond cut data, force deployments and factoryDeps encoded + /// @param _factoryDeps the factory dependencies used for 
the genesis upgrade + /// that initializes the chains Diamond Proxy + function createNewChain( + uint256 _chainId, + bytes32 _baseTokenAssetId, + address _admin, + bytes calldata _initData, + bytes[] calldata _factoryDeps + ) external onlyBridgehub returns (address zkChainAddress) { + (bytes memory _diamondCut, bytes memory _forceDeploymentData) = abi.decode(_initData, (bytes, bytes)); + + // solhint-disable-next-line func-named-parameters + zkChainAddress = _deployNewChain(_chainId, _baseTokenAssetId, _admin, _diamondCut); + + { + // check input + bytes32 forceDeploymentHash = keccak256(abi.encode(_forceDeploymentData)); + require(forceDeploymentHash == initialForceDeploymentHash, "CTM: initial force deployment mismatch"); + } + // genesis upgrade, deploys some contracts, sets chainId + IAdmin(zkChainAddress).genesisUpgrade( + l1GenesisUpgrade, + address(IBridgehub(BRIDGE_HUB).l1CtmDeployer()), + _forceDeploymentData, + _factoryDeps + ); + } + + /// @param _chainId the chainId of the chain + function getProtocolVersion(uint256 _chainId) public view returns (uint256) { + return IZKChain(getZKChain(_chainId)).getProtocolVersion(); + } + + /// @param _newSettlementLayerChainId the chainId of the chain + /// @param _isWhitelisted whether the chain is whitelisted + function registerSettlementLayer(uint256 _newSettlementLayerChainId, bool _isWhitelisted) external onlyOwner { + require(_newSettlementLayerChainId != 0, "Bad chain id"); + + // Currently, we require that the sync layer is deployed by the same CTM. + require(getZKChain(_newSettlementLayerChainId) != address(0), "CTM: sync layer not registered"); + + IBridgehub(BRIDGE_HUB).registerSettlementLayer(_newSettlementLayerChainId, _isWhitelisted); + } + + /// @notice Called by the bridgehub during the migration of a chain to another settlement layer. + /// @param _chainId The chain id of the chain to be migrated. + /// @param _data The data needed to perform the migration. 
+ function forwardedBridgeBurn( + uint256 _chainId, + bytes calldata _data + ) external view override onlyBridgehub returns (bytes memory ctmForwardedBridgeMintData) { + // Note that the `_diamondCut` here is not for the current chain, but for the chain where the migration + // happens. The correctness of it will be checked on the CTM on the new settlement layer. + (address _newSettlementLayerAdmin, bytes memory _diamondCut) = abi.decode(_data, (address, bytes)); + require(_newSettlementLayerAdmin != address(0), "CTM: admin zero"); + + // We ensure that the chain has the latest protocol version to avoid edge cases + // related to different protocol version support. + address zkChain = getZKChain(_chainId); + require(IZKChain(zkChain).getProtocolVersion() == protocolVersion, "CTM: outdated pv"); + + return + abi.encode( + IBridgehub(BRIDGE_HUB).baseTokenAssetId(_chainId), + _newSettlementLayerAdmin, + protocolVersion, + _diamondCut + ); + } + + /// @notice Called by the bridgehub during the migration of a chain to the current settlement layer. + /// @param _chainId The chain id of the chain to be migrated. + /// @param _ctmData The data returned from `forwardedBridgeBurn` for the chain. + function forwardedBridgeMint( + uint256 _chainId, + bytes calldata _ctmData + ) external override onlyBridgehub returns (address chainAddress) { + (bytes32 _baseTokenAssetId, address _admin, uint256 _protocolVersion, bytes memory _diamondCut) = abi.decode( + _ctmData, + (bytes32, address, uint256, bytes) + ); + + // We ensure that the chain has the latest protocol version to avoid edge cases + // related to different protocol version support. + require(_protocolVersion == protocolVersion, "CTM, outdated pv"); + chainAddress = _deployNewChain({ + _chainId: _chainId, + _baseTokenAssetId: _baseTokenAssetId, + _admin: _admin, + _diamondCut: _diamondCut + }); + } + + /// @notice Called by the bridgehub during the failed migration of a chain. 
+ /// @param _chainId the chainId of the chain + /// @param _assetInfo the assetInfo of the chain + /// @param _depositSender the address that sent the deposit + /// @param _ctmData the data of the migration + function forwardedBridgeRecoverFailedTransfer( + uint256 /* _chainId */, + bytes32 /* _assetInfo */, + address /* _depositSender */, + bytes calldata /* _ctmData */ + ) external { + // Function is empty due to the fact that when calling `forwardedBridgeBurn` there are no + // state updates that occur. + } + + /*////////////////////////////////////////////////////////////// + Legacy functions + //////////////////////////////////////////////////////////////*/ + + /// @notice return the chain contract address for a chainId + function getHyperchain(uint256 _chainId) public view returns (address) { + return getZKChain(_chainId); + } +} diff --git a/l1-contracts/contracts/state-transition/DualVerifier.sol b/l1-contracts/contracts/state-transition/DualVerifier.sol new file mode 100644 index 000000000..d3cfadf30 --- /dev/null +++ b/l1-contracts/contracts/state-transition/DualVerifier.sol @@ -0,0 +1,44 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IVerifier} from "./chain-interfaces/IVerifier.sol"; +import {ZKChainStorage} from "./chain-deps/ZKChainStorage.sol"; +import {ZeroAddress} from "../common/L1ContractErrors.sol"; + +/// @title Dual Verifier +/// @author Matter Labs +/// @notice Wrapper contract to verify a zk-SNARK proof based on the proof type +/// @custom:security-contact security@matterlabs.dev +contract DualVerifier is IVerifier{ + + // slither-disable-next-line uninitialized-state + ZKChainStorage internal s; + + /// @dev Routes the proof verification to the appropriate verifier based on the length of proof + /// @inheritdoc IVerifier + function verify( + uint256[] calldata _publicInputs, + uint256[] calldata _proof + ) public view virtual returns (bool) { + address plonkVerifier = s.plonkVerifier; + address fflonkVerifier = 
s.fflonkVerifier; + uint256 fflonkProofLength = s.fflonkProofLength; + uint256 proofLength = _proof.length; + // Selects the verifier based on the proof type + address verifier; + if (proofLength == fflonkProofLength) { + verifier = fflonkVerifier; + } + else { + verifier = plonkVerifier; + } + + if (verifier == address(0)) { + revert ZeroAddress(); + } + + return IVerifier(verifier).verify(_publicInputs, _proof); + + } +} \ No newline at end of file diff --git a/l1-contracts/contracts/state-transition/IStateTransitionManager.sol b/l1-contracts/contracts/state-transition/IChainTypeManager.sol similarity index 70% rename from l1-contracts/contracts/state-transition/IStateTransitionManager.sol rename to l1-contracts/contracts/state-transition/IChainTypeManager.sol index d58f46df5..90b500b28 100644 --- a/l1-contracts/contracts/state-transition/IStateTransitionManager.sol +++ b/l1-contracts/contracts/state-transition/IChainTypeManager.sol @@ -1,18 +1,20 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; import {Diamond} from "./libraries/Diamond.sol"; import {L2CanonicalTransaction} from "../common/Messaging.sol"; -import {FeeParams} from "./chain-deps/ZkSyncHyperchainStorage.sol"; +import {FeeParams} from "./chain-deps/ZKChainStorage.sol"; + +// import {IBridgehub} from "../bridgehub/IBridgehub.sol"; -/// @notice Struct that holds all data needed for initializing STM Proxy. +/// @notice Struct that holds all data needed for initializing CTM Proxy. /// @dev We use struct instead of raw parameters in `initialize` function to prevent "Stack too deep" error /// @param owner The address who can manage non-critical updates in the contract /// @param validatorTimelock The address that serves as consensus, i.e. 
can submit blocks to be processed /// @param chainCreationParams The struct that contains the fields that define how a new chain should be created /// @param protocolVersion The initial protocol version on the newly deployed chain -struct StateTransitionManagerInitializeData { +struct ChainTypeManagerInitializeData { address owner; address validatorTimelock; ChainCreationParams chainCreationParams; @@ -20,27 +22,29 @@ struct StateTransitionManagerInitializeData { } /// @notice The struct that contains the fields that define how a new chain should be created -/// within this STM. +/// within this CTM. /// @param genesisUpgrade The address that is used in the diamond cut initialize address on chain creation /// @param genesisBatchHash Batch hash of the genesis (initial) batch /// @param genesisIndexRepeatedStorageChanges The serial number of the shortcut storage key for the genesis batch /// @param genesisBatchCommitment The zk-proof commitment for the genesis batch /// @param diamondCut The diamond cut for the first upgrade transaction on the newly deployed chain +// solhint-disable-next-line gas-struct-packing struct ChainCreationParams { address genesisUpgrade; bytes32 genesisBatchHash; uint64 genesisIndexRepeatedStorageChanges; bytes32 genesisBatchCommitment; Diamond.DiamondCutData diamondCut; + bytes forceDeploymentsData; } -interface IStateTransitionManager { - /// @dev Emitted when a new Hyperchain is added - event NewHyperchain(uint256 indexed _chainId, address indexed _hyperchainContract); +interface IChainTypeManager { + /// @dev Emitted when a new ZKChain is added + event NewZKChain(uint256 indexed _chainId, address indexed _zkChainContract); - /// @dev emitted when an chain registers and a SetChainIdUpgrade happens - event SetChainIdUpgrade( - address indexed _hyperchain, + /// @dev emitted when a chain registers and a GenesisUpgrade happens + event GenesisUpgrade( + address indexed _zkChain, L2CanonicalTransaction _l2Transaction, uint256 indexed 
_protocolVersion ); @@ -61,7 +65,8 @@ interface IStateTransitionManager { bytes32 genesisBatchHash, uint64 genesisIndexRepeatedStorageChanges, bytes32 genesisBatchCommitment, - bytes32 newInitialCutHash + bytes32 newInitialCutHash, + bytes32 forceDeploymentHash ); /// @notice New UpgradeCutHash @@ -79,17 +84,15 @@ interface IStateTransitionManager { function acceptAdmin() external; - function getAllHyperchains() external view returns (address[] memory); + function getZKChain(uint256 _chainId) external view returns (address); - function getAllHyperchainChainIDs() external view returns (uint256[] memory); - - function getHyperchain(uint256 _chainId) external view returns (address); + function getZKChainLegacy(uint256 _chainId) external view returns (address); function storedBatchZero() external view returns (bytes32); function initialCutHash() external view returns (bytes32); - function genesisUpgrade() external view returns (address); + function l1GenesisUpgrade() external view returns (address); function upgradeCutHash(uint256 _protocolVersion) external view returns (bytes32); @@ -99,7 +102,9 @@ interface IStateTransitionManager { function protocolVersionIsActive(uint256 _protocolVersion) external view returns (bool); - function initialize(StateTransitionManagerInitializeData calldata _initializeData) external; + function getProtocolVersion(uint256 _chainId) external view returns (uint256); + + function initialize(ChainTypeManagerInitializeData calldata _initializeData) external; function setValidatorTimelock(address _validatorTimelock) external; @@ -109,18 +114,16 @@ interface IStateTransitionManager { function createNewChain( uint256 _chainId, - address _baseToken, - address _sharedBridge, + bytes32 _baseTokenAssetId, address _admin, - bytes calldata _diamondCut - ) external; - - function registerAlreadyDeployedHyperchain(uint256 _chainId, address _hyperchain) external; + bytes calldata _initData, + bytes[] calldata _factoryDeps + ) external returns (address); 
function setNewVersionUpgrade( Diamond.DiamondCutData calldata _cutData, uint256 _oldProtocolVersion, - uint256 _oldprotocolVersionDeadline, + uint256 _oldProtocolVersionDeadline, uint256 _newProtocolVersion ) external; @@ -149,4 +152,22 @@ interface IStateTransitionManager { ) external; function getSemverProtocolVersion() external view returns (uint32, uint32, uint32); + + function registerSettlementLayer(uint256 _newSettlementLayerChainId, bool _isWhitelisted) external; + + event BridgeInitialize(address indexed l1Token, string name, string symbol, uint8 decimals); + + function forwardedBridgeBurn( + uint256 _chainId, + bytes calldata _data + ) external returns (bytes memory _bridgeMintData); + + function forwardedBridgeMint(uint256 _chainId, bytes calldata _data) external returns (address); + + function forwardedBridgeRecoverFailedTransfer( + uint256 _chainId, + bytes32 _assetInfo, + address _depositSender, + bytes calldata _ctmData + ) external; } diff --git a/l1-contracts/contracts/state-transition/StateTransitionManager.sol b/l1-contracts/contracts/state-transition/StateTransitionManager.sol deleted file mode 100644 index 37929f94c..000000000 --- a/l1-contracts/contracts/state-transition/StateTransitionManager.sol +++ /dev/null @@ -1,438 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {EnumerableMap} from "@openzeppelin/contracts/utils/structs/EnumerableMap.sol"; -import {SafeCast} from "@openzeppelin/contracts/utils/math/SafeCast.sol"; - -import {Diamond} from "./libraries/Diamond.sol"; -import {DiamondProxy} from "./chain-deps/DiamondProxy.sol"; -import {IAdmin} from "./chain-interfaces/IAdmin.sol"; -import {IDefaultUpgrade} from "../upgrades/IDefaultUpgrade.sol"; -import {IDiamondInit} from "./chain-interfaces/IDiamondInit.sol"; -import {IExecutor} from "./chain-interfaces/IExecutor.sol"; -import {IStateTransitionManager, StateTransitionManagerInitializeData, ChainCreationParams} from "./IStateTransitionManager.sol"; -import 
{ISystemContext} from "./l2-deps/ISystemContext.sol"; -import {IZkSyncHyperchain} from "./chain-interfaces/IZkSyncHyperchain.sol"; -import {FeeParams} from "./chain-deps/ZkSyncHyperchainStorage.sol"; -import {L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, L2_FORCE_DEPLOYER_ADDR} from "../common/L2ContractAddresses.sol"; -import {L2CanonicalTransaction} from "../common/Messaging.sol"; -import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable/access/Ownable2StepUpgradeable.sol"; -import {ProposedUpgrade} from "../upgrades/BaseZkSyncUpgrade.sol"; -import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; -import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, L2_TO_L1_LOG_SERIALIZE_SIZE, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK, SYSTEM_UPGRADE_L2_TX_TYPE, PRIORITY_TX_MAX_GAS_LIMIT} from "../common/Config.sol"; -import {VerifierParams} from "./chain-interfaces/IVerifier.sol"; -import {SemVer} from "../common/libraries/SemVer.sol"; - -/// @title State Transition Manager contract -/// @author Matter Labs -/// @custom:security-contact security@matterlabs.dev -contract StateTransitionManager is IStateTransitionManager, ReentrancyGuard, Ownable2StepUpgradeable { - using EnumerableMap for EnumerableMap.UintToAddressMap; - - /// @notice Address of the bridgehub - address public immutable BRIDGE_HUB; - - /// @notice The total number of hyperchains can be created/connected to this STM. - /// This is the temporary security measure. - uint256 public immutable MAX_NUMBER_OF_HYPERCHAINS; - - /// @notice The map from chainId => hyperchain contract - EnumerableMap.UintToAddressMap internal hyperchainMap; - - /// @dev The batch zero hash, calculated at initialization - bytes32 public storedBatchZero; - - /// @dev The stored cutData for diamond cut - bytes32 public initialCutHash; - - /// @dev The genesisUpgrade contract address, used to setChainId - address public genesisUpgrade; - - /// @dev The current packed protocolVersion. 
To access human-readable version, use `getSemverProtocolVersion` function. - uint256 public protocolVersion; - - /// @dev The timestamp when protocolVersion can be last used - mapping(uint256 _protocolVersion => uint256) public protocolVersionDeadline; - - /// @dev The validatorTimelock contract address, used to setChainId - address public validatorTimelock; - - /// @dev The stored cutData for upgrade diamond cut. protocolVersion => cutHash - mapping(uint256 protocolVersion => bytes32 cutHash) public upgradeCutHash; - - /// @dev The address used to manage non critical updates - address public admin; - - /// @dev The address to accept the admin role - address private pendingAdmin; - - /// @dev Contract is expected to be used as proxy implementation. - /// @dev Initialize the implementation to prevent Parity hack. - constructor(address _bridgehub, uint256 _maxNumberOfHyperchains) reentrancyGuardInitializer { - BRIDGE_HUB = _bridgehub; - MAX_NUMBER_OF_HYPERCHAINS = _maxNumberOfHyperchains; - - // While this does not provide a protection in the production, it is needed for local testing - // Length of the L2Log encoding should not be equal to the length of other L2Logs' tree nodes preimages - assert(L2_TO_L1_LOG_SERIALIZE_SIZE != 2 * 32); - } - - /// @notice only the bridgehub can call - modifier onlyBridgehub() { - require(msg.sender == BRIDGE_HUB, "STM: only bridgehub"); - _; - } - - /// @notice the admin can call, for non-critical updates - modifier onlyOwnerOrAdmin() { - require(msg.sender == admin || msg.sender == owner(), "STM: not owner or admin"); - _; - } - - /// @return The tuple of (major, minor, patch) protocol version. 
- function getSemverProtocolVersion() external view returns (uint32, uint32, uint32) { - // slither-disable-next-line unused-return - return SemVer.unpackSemVer(SafeCast.toUint96(protocolVersion)); - } - - /// @notice Returns all the registered hyperchain addresses - function getAllHyperchains() public view override returns (address[] memory chainAddresses) { - uint256[] memory keys = hyperchainMap.keys(); - chainAddresses = new address[](keys.length); - for (uint256 i = 0; i < keys.length; i++) { - chainAddresses[i] = hyperchainMap.get(keys[i]); - } - } - - /// @notice Returns all the registered hyperchain chainIDs - function getAllHyperchainChainIDs() public view override returns (uint256[] memory) { - return hyperchainMap.keys(); - } - - /// @notice Returns the address of the hyperchain with the corresponding chainID - function getHyperchain(uint256 _chainId) public view override returns (address chainAddress) { - // slither-disable-next-line unused-return - (, chainAddress) = hyperchainMap.tryGet(_chainId); - } - - /// @notice Returns the address of the hyperchain admin with the corresponding chainID - function getChainAdmin(uint256 _chainId) external view override returns (address) { - return IZkSyncHyperchain(hyperchainMap.get(_chainId)).getAdmin(); - } - - /// @dev initialize - function initialize( - StateTransitionManagerInitializeData calldata _initializeData - ) external reentrancyGuardInitializer { - require(_initializeData.owner != address(0), "STM: owner zero"); - _transferOwnership(_initializeData.owner); - - protocolVersion = _initializeData.protocolVersion; - protocolVersionDeadline[_initializeData.protocolVersion] = type(uint256).max; - validatorTimelock = _initializeData.validatorTimelock; - - _setChainCreationParams(_initializeData.chainCreationParams); - } - - /// @notice Updates the parameters with which a new chain is created - /// @param _chainCreationParams The new chain creation parameters - function 
_setChainCreationParams(ChainCreationParams calldata _chainCreationParams) internal { - require(_chainCreationParams.genesisUpgrade != address(0), "STM: genesisUpgrade zero"); - require(_chainCreationParams.genesisBatchHash != bytes32(0), "STM: genesisBatchHash zero"); - require( - _chainCreationParams.genesisIndexRepeatedStorageChanges != uint64(0), - "STM: genesisIndexRepeatedStorageChanges zero" - ); - require(_chainCreationParams.genesisBatchCommitment != bytes32(0), "STM: genesisBatchCommitment zero"); - - genesisUpgrade = _chainCreationParams.genesisUpgrade; - - // We need to initialize the state hash because it is used in the commitment of the next batch - IExecutor.StoredBatchInfo memory batchZero = IExecutor.StoredBatchInfo({ - batchNumber: 0, - batchHash: _chainCreationParams.genesisBatchHash, - indexRepeatedStorageChanges: _chainCreationParams.genesisIndexRepeatedStorageChanges, - numberOfLayer1Txs: 0, - priorityOperationsHash: EMPTY_STRING_KECCAK, - l2LogsTreeRoot: DEFAULT_L2_LOGS_TREE_ROOT_HASH, - timestamp: 0, - commitment: _chainCreationParams.genesisBatchCommitment - }); - storedBatchZero = keccak256(abi.encode(batchZero)); - bytes32 newInitialCutHash = keccak256(abi.encode(_chainCreationParams.diamondCut)); - initialCutHash = newInitialCutHash; - - emit NewChainCreationParams({ - genesisUpgrade: _chainCreationParams.genesisUpgrade, - genesisBatchHash: _chainCreationParams.genesisBatchHash, - genesisIndexRepeatedStorageChanges: _chainCreationParams.genesisIndexRepeatedStorageChanges, - genesisBatchCommitment: _chainCreationParams.genesisBatchCommitment, - newInitialCutHash: newInitialCutHash - }); - } - - /// @notice Updates the parameters with which a new chain is created - /// @param _chainCreationParams The new chain creation parameters - function setChainCreationParams(ChainCreationParams calldata _chainCreationParams) external onlyOwner { - _setChainCreationParams(_chainCreationParams); - } - - /// @notice Starts the transfer of admin rights. 
Only the current admin can propose a new pending one. - /// @notice New admin can accept admin rights by calling `acceptAdmin` function. - /// @param _newPendingAdmin Address of the new admin - /// @dev Please note, if the owner wants to enforce the admin change it must execute both `setPendingAdmin` and - /// `acceptAdmin` atomically. Otherwise `admin` can set different pending admin and so fail to accept the admin rights. - function setPendingAdmin(address _newPendingAdmin) external onlyOwnerOrAdmin { - // Save previous value into the stack to put it into the event later - address oldPendingAdmin = pendingAdmin; - // Change pending admin - pendingAdmin = _newPendingAdmin; - emit NewPendingAdmin(oldPendingAdmin, _newPendingAdmin); - } - - /// @notice Accepts transfer of admin rights. Only pending admin can accept the role. - function acceptAdmin() external { - address currentPendingAdmin = pendingAdmin; - require(msg.sender == currentPendingAdmin, "n42"); // Only proposed by current admin address can claim the admin rights - - address previousAdmin = admin; - admin = currentPendingAdmin; - delete pendingAdmin; - - emit NewPendingAdmin(currentPendingAdmin, address(0)); - emit NewAdmin(previousAdmin, currentPendingAdmin); - } - - /// @dev set validatorTimelock. 
Cannot do it during initialization, as validatorTimelock is deployed after STM - function setValidatorTimelock(address _validatorTimelock) external onlyOwnerOrAdmin { - address oldValidatorTimelock = validatorTimelock; - validatorTimelock = _validatorTimelock; - emit NewValidatorTimelock(oldValidatorTimelock, _validatorTimelock); - } - - /// @dev set New Version with upgrade from old version - function setNewVersionUpgrade( - Diamond.DiamondCutData calldata _cutData, - uint256 _oldProtocolVersion, - uint256 _oldProtocolVersionDeadline, - uint256 _newProtocolVersion - ) external onlyOwner { - bytes32 newCutHash = keccak256(abi.encode(_cutData)); - uint256 previousProtocolVersion = protocolVersion; - upgradeCutHash[_oldProtocolVersion] = newCutHash; - protocolVersionDeadline[_oldProtocolVersion] = _oldProtocolVersionDeadline; - protocolVersionDeadline[_newProtocolVersion] = type(uint256).max; - protocolVersion = _newProtocolVersion; - emit NewProtocolVersion(previousProtocolVersion, _newProtocolVersion); - emit NewUpgradeCutHash(_oldProtocolVersion, newCutHash); - emit NewUpgradeCutData(_newProtocolVersion, _cutData); - } - - /// @dev check that the protocolVersion is active - function protocolVersionIsActive(uint256 _protocolVersion) external view override returns (bool) { - return block.timestamp <= protocolVersionDeadline[_protocolVersion]; - } - - /// @dev set the protocol version timestamp - function setProtocolVersionDeadline(uint256 _protocolVersion, uint256 _timestamp) external onlyOwner { - protocolVersionDeadline[_protocolVersion] = _timestamp; - } - - /// @dev set upgrade for some protocolVersion - function setUpgradeDiamondCut( - Diamond.DiamondCutData calldata _cutData, - uint256 _oldProtocolVersion - ) external onlyOwner { - bytes32 newCutHash = keccak256(abi.encode(_cutData)); - upgradeCutHash[_oldProtocolVersion] = newCutHash; - emit NewUpgradeCutHash(_oldProtocolVersion, newCutHash); - } - - /// @dev freezes the specified chain - function 
freezeChain(uint256 _chainId) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).freezeDiamond(); - } - - /// @dev freezes the specified chain - function unfreezeChain(uint256 _chainId) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).unfreezeDiamond(); - } - - /// @dev reverts batches on the specified chain - function revertBatches(uint256 _chainId, uint256 _newLastBatch) external onlyOwnerOrAdmin { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).revertBatches(_newLastBatch); - } - - /// @dev execute predefined upgrade - function upgradeChainFromVersion( - uint256 _chainId, - uint256 _oldProtocolVersion, - Diamond.DiamondCutData calldata _diamondCut - ) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).upgradeChainFromVersion(_oldProtocolVersion, _diamondCut); - } - - /// @dev executes upgrade on chain - function executeUpgrade(uint256 _chainId, Diamond.DiamondCutData calldata _diamondCut) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).executeUpgrade(_diamondCut); - } - - /// @dev setPriorityTxMaxGasLimit for the specified chain - function setPriorityTxMaxGasLimit(uint256 _chainId, uint256 _maxGasLimit) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).setPriorityTxMaxGasLimit(_maxGasLimit); - } - - /// @dev setTokenMultiplier for the specified chain - function setTokenMultiplier(uint256 _chainId, uint128 _nominator, uint128 _denominator) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).setTokenMultiplier(_nominator, _denominator); - } - - /// @dev changeFeeParams for the specified chain - function changeFeeParams(uint256 _chainId, FeeParams calldata _newFeeParams) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).changeFeeParams(_newFeeParams); - } - - /// @dev setValidator for the specified chain - function setValidator(uint256 _chainId, address _validator, bool _active) external onlyOwnerOrAdmin { - 
IZkSyncHyperchain(hyperchainMap.get(_chainId)).setValidator(_validator, _active); - } - - /// @dev setPorterAvailability for the specified chain - function setPorterAvailability(uint256 _chainId, bool _zkPorterIsAvailable) external onlyOwner { - IZkSyncHyperchain(hyperchainMap.get(_chainId)).setPorterAvailability(_zkPorterIsAvailable); - } - - /// registration - - /// @dev we have to set the chainId at genesis, as blockhashzero is the same for all chains with the same chainId - function _setChainIdUpgrade(uint256 _chainId, address _chainContract) internal { - bytes memory systemContextCalldata = abi.encodeCall(ISystemContext.setChainId, (_chainId)); - uint256[] memory uintEmptyArray; - bytes[] memory bytesEmptyArray; - - uint256 cachedProtocolVersion = protocolVersion; - // slither-disable-next-line unused-return - (, uint32 minorVersion, ) = SemVer.unpackSemVer(SafeCast.toUint96(cachedProtocolVersion)); - - L2CanonicalTransaction memory l2ProtocolUpgradeTx = L2CanonicalTransaction({ - txType: SYSTEM_UPGRADE_L2_TX_TYPE, - from: uint256(uint160(L2_FORCE_DEPLOYER_ADDR)), - to: uint256(uint160(L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR)), - gasLimit: PRIORITY_TX_MAX_GAS_LIMIT, - gasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, - maxFeePerGas: uint256(0), - maxPriorityFeePerGas: uint256(0), - paymaster: uint256(0), - // Note, that the `minor` of the protocol version is used as "nonce" for system upgrade transactions - nonce: uint256(minorVersion), - value: 0, - reserved: [uint256(0), 0, 0, 0], - data: systemContextCalldata, - signature: new bytes(0), - factoryDeps: uintEmptyArray, - paymasterInput: new bytes(0), - reservedDynamic: new bytes(0) - }); - - ProposedUpgrade memory proposedUpgrade = ProposedUpgrade({ - l2ProtocolUpgradeTx: l2ProtocolUpgradeTx, - factoryDeps: bytesEmptyArray, - bootloaderHash: bytes32(0), - defaultAccountHash: bytes32(0), - verifier: address(0), - verifierParams: VerifierParams({ - recursionNodeLevelVkHash: bytes32(0), - 
recursionLeafLevelVkHash: bytes32(0), - recursionCircuitsSetVksHash: bytes32(0) - }), - l1ContractsUpgradeCalldata: new bytes(0), - postUpgradeCalldata: new bytes(0), - upgradeTimestamp: 0, - newProtocolVersion: cachedProtocolVersion - }); - - Diamond.FacetCut[] memory emptyArray; - Diamond.DiamondCutData memory cutData = Diamond.DiamondCutData({ - facetCuts: emptyArray, - initAddress: genesisUpgrade, - initCalldata: abi.encodeCall(IDefaultUpgrade.upgrade, (proposedUpgrade)) - }); - - IAdmin(_chainContract).executeUpgrade(cutData); - emit SetChainIdUpgrade(_chainContract, l2ProtocolUpgradeTx, cachedProtocolVersion); - } - - /// @dev used to register already deployed hyperchain contracts - /// @param _chainId the chain's id - /// @param _hyperchain the chain's contract address - function registerAlreadyDeployedHyperchain(uint256 _chainId, address _hyperchain) external onlyOwner { - require(_hyperchain != address(0), "STM: hyperchain zero"); - - _registerNewHyperchain(_chainId, _hyperchain); - } - - /// @notice called by Bridgehub when a chain registers - /// @param _chainId the chain's id - /// @param _baseToken the base token address used to pay for gas fees - /// @param _sharedBridge the shared bridge address, used as base token bridge - /// @param _admin the chain's admin address - /// @param _diamondCut the diamond cut data that initializes the chains Diamond Proxy - function createNewChain( - uint256 _chainId, - address _baseToken, - address _sharedBridge, - address _admin, - bytes calldata _diamondCut - ) external onlyBridgehub { - if (getHyperchain(_chainId) != address(0)) { - // Hyperchain already registered - return; - } - - // check not registered - Diamond.DiamondCutData memory diamondCut = abi.decode(_diamondCut, (Diamond.DiamondCutData)); - - // check input - bytes32 cutHashInput = keccak256(_diamondCut); - require(cutHashInput == initialCutHash, "STM: initial cutHash mismatch"); - - // construct init data - bytes memory initData; - /// all together 
4+9*32=292 bytes - // solhint-disable-next-line func-named-parameters - initData = bytes.concat( - IDiamondInit.initialize.selector, - bytes32(_chainId), - bytes32(uint256(uint160(BRIDGE_HUB))), - bytes32(uint256(uint160(address(this)))), - bytes32(uint256(protocolVersion)), - bytes32(uint256(uint160(_admin))), - bytes32(uint256(uint160(validatorTimelock))), - bytes32(uint256(uint160(_baseToken))), - bytes32(uint256(uint160(_sharedBridge))), - bytes32(storedBatchZero), - diamondCut.initCalldata - ); - - diamondCut.initCalldata = initData; - // deploy hyperchainContract - // slither-disable-next-line reentrancy-no-eth - DiamondProxy hyperchainContract = new DiamondProxy{salt: bytes32(0)}(block.chainid, diamondCut); - // save data - address hyperchainAddress = address(hyperchainContract); - - _registerNewHyperchain(_chainId, hyperchainAddress); - - // set chainId in VM - _setChainIdUpgrade(_chainId, hyperchainAddress); - } - - /// @dev This internal function is used to register a new hyperchain in the system. 
- function _registerNewHyperchain(uint256 _chainId, address _hyperchain) internal { - // slither-disable-next-line unused-return - hyperchainMap.set(_chainId, _hyperchain); - require(hyperchainMap.length() <= MAX_NUMBER_OF_HYPERCHAINS, "STM: Hyperchain limit reached"); - emit NewHyperchain(_chainId, _hyperchain); - } -} diff --git a/l1-contracts/contracts/state-transition/TestnetVerifier.sol b/l1-contracts/contracts/state-transition/TestnetVerifier.sol index 6e97fed05..0063316c0 100644 --- a/l1-contracts/contracts/state-transition/TestnetVerifier.sol +++ b/l1-contracts/contracts/state-transition/TestnetVerifier.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.24; -import {Verifier} from "./Verifier.sol"; +import {DualVerifier} from "./DualVerifier.sol"; import {IVerifier} from "./chain-interfaces/IVerifier.sol"; /// @author Matter Labs @@ -11,24 +11,20 @@ import {IVerifier} from "./chain-interfaces/IVerifier.sol"; /// @dev This contract is used to skip the zkp verification for the testnet environment. /// If the proof is not empty, it will verify it using the main verifier contract, /// otherwise, it will skip the verification. -contract TestnetVerifier is Verifier { +contract TestnetVerifier is DualVerifier { constructor() { assert(block.chainid != 1); } /// @dev Verifies a zk-SNARK proof, skipping the verification if the proof is empty. 
/// @inheritdoc IVerifier - function verify( - uint256[] calldata _publicInputs, - uint256[] calldata _proof, - uint256[] calldata _recursiveAggregationInput - ) public view override returns (bool) { + function verify(uint256[] calldata _publicInputs, uint256[] calldata _proof) public view override returns (bool) { // We allow skipping the zkp verification for the test(net) environment // If the proof is not empty, verify it, otherwise, skip the verification if (_proof.length == 0) { return true; } - return super.verify(_publicInputs, _proof, _recursiveAggregationInput); + return super.verify(_publicInputs, _proof); } } diff --git a/l1-contracts/contracts/state-transition/ValidatorTimelock.sol b/l1-contracts/contracts/state-transition/ValidatorTimelock.sol index d793783d6..64cc0bc20 100644 --- a/l1-contracts/contracts/state-transition/ValidatorTimelock.sol +++ b/l1-contracts/contracts/state-transition/ValidatorTimelock.sol @@ -2,18 +2,19 @@ pragma solidity 0.8.24; -import {Ownable2Step} from "@openzeppelin/contracts/access/Ownable2Step.sol"; +import {Ownable2Step} from "@openzeppelin/contracts-v4/access/Ownable2Step.sol"; import {LibMap} from "./libraries/LibMap.sol"; import {IExecutor} from "./chain-interfaces/IExecutor.sol"; -import {IStateTransitionManager} from "./IStateTransitionManager.sol"; +import {IChainTypeManager} from "./IChainTypeManager.sol"; +import {Unauthorized, TimeNotReached, ZeroAddress} from "../common/L1ContractErrors.sol"; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -/// @notice Intermediate smart contract between the validator EOA account and the hyperchains state transition diamond smart contract. +/// @notice Intermediate smart contract between the validator EOA account and the ZK chains state transition diamond smart contract. /// @dev The primary purpose of this contract is to provide a trustless means of delaying batch execution without -/// modifying the main hyperchain diamond contract. 
As such, even if this contract is compromised, it will not impact the main +/// modifying the main zkChain diamond contract. As such, even if this contract is compromised, it will not impact the main /// contract. -/// @dev zkSync actively monitors the chain activity and reacts to any suspicious activity by freezing the chain. +/// @dev ZKsync actively monitors the chain activity and reacts to any suspicious activity by freezing the chain. /// This allows time for investigation and mitigation before resuming normal operations. /// @dev The contract overloads all of the 4 methods, that are used in state transition. When the batch is committed, /// the timestamp is stored for it. Later, when the owner calls the batch execution, the contract checks that batch @@ -39,8 +40,8 @@ contract ValidatorTimelock is IExecutor, Ownable2Step { /// @notice Error for when an address is not a validator. error ValidatorDoesNotExist(uint256 _chainId); - /// @dev The stateTransitionManager smart contract. - IStateTransitionManager public stateTransitionManager; + /// @dev The chainTypeManager smart contract. + IChainTypeManager public chainTypeManager; /// @dev The mapping of L2 chainId => batch number => timestamp when it was committed. mapping(uint256 chainId => LibMap.Uint32Map batchNumberToTimestampMapping) internal committedBatchTimestamp; @@ -52,7 +53,7 @@ contract ValidatorTimelock is IExecutor, Ownable2Step { uint32 public executionDelay; /// @dev Era's chainID - uint256 immutable ERA_CHAIN_ID; + uint256 internal immutable ERA_CHAIN_ID; constructor(address _initialOwner, uint32 _executionDelay, uint256 _eraChainId) { _transferOwnership(_initialOwner); @@ -62,19 +63,26 @@ contract ValidatorTimelock is IExecutor, Ownable2Step { /// @notice Checks if the caller is the admin of the chain. 
modifier onlyChainAdmin(uint256 _chainId) { - require(msg.sender == stateTransitionManager.getChainAdmin(_chainId), "ValidatorTimelock: only chain admin"); + if (msg.sender != chainTypeManager.getChainAdmin(_chainId)) { + revert Unauthorized(msg.sender); + } _; } /// @notice Checks if the caller is a validator. modifier onlyValidator(uint256 _chainId) { - require(validators[_chainId][msg.sender], "ValidatorTimelock: only validator"); + if (!validators[_chainId][msg.sender]) { + revert Unauthorized(msg.sender); + } _; } /// @dev Sets a new state transition manager. - function setStateTransitionManager(IStateTransitionManager _stateTransitionManager) external onlyOwner { - stateTransitionManager = _stateTransitionManager; + function setChainTypeManager(IChainTypeManager _chainTypeManager) external onlyOwner { + if (address(_chainTypeManager) == address(0)) { + revert ZeroAddress(); + } + chainTypeManager = _chainTypeManager; } /// @dev Sets an address as a validator. @@ -107,114 +115,79 @@ contract ValidatorTimelock is IExecutor, Ownable2Step { } /// @dev Records the timestamp for all provided committed batches and make - /// a call to the hyperchain diamond contract with the same calldata. - function commitBatches( - StoredBatchInfo calldata, - CommitBatchInfo[] calldata _newBatchesData - ) external onlyValidator(ERA_CHAIN_ID) { - _commitBatchesInner(ERA_CHAIN_ID, _newBatchesData); - } - - /// @dev Records the timestamp for all provided committed batches and make - /// a call to the hyperchain diamond contract with the same calldata. + /// a call to the zkChain diamond contract with the same calldata. 
function commitBatchesSharedBridge( uint256 _chainId, - StoredBatchInfo calldata, - CommitBatchInfo[] calldata _newBatchesData + uint256 _processBatchFrom, + uint256 _processBatchTo, + bytes calldata ) external onlyValidator(_chainId) { - _commitBatchesInner(_chainId, _newBatchesData); - } - - function _commitBatchesInner(uint256 _chainId, CommitBatchInfo[] calldata _newBatchesData) internal { unchecked { // This contract is only a temporary solution, that hopefully will be disabled until 2106 year, so... // It is safe to cast. uint32 timestamp = uint32(block.timestamp); - for (uint256 i = 0; i < _newBatchesData.length; ++i) { - committedBatchTimestamp[_chainId].set(_newBatchesData[i].batchNumber, timestamp); + // We disable this check because calldata array length is cheap. + for (uint256 i = _processBatchFrom; i <= _processBatchTo; ++i) { + committedBatchTimestamp[_chainId].set(i, timestamp); } } - - _propagateToZkSyncHyperchain(_chainId); - } - - /// @dev Make a call to the hyperchain diamond contract with the same calldata. - /// Note: If the batch is reverted, it needs to be committed first before the execution. - /// So it's safe to not override the committed batches. - function revertBatches(uint256) external onlyValidator(ERA_CHAIN_ID) { - _propagateToZkSyncHyperchain(ERA_CHAIN_ID); + _propagateToZKChain(_chainId); } - /// @dev Make a call to the hyperchain diamond contract with the same calldata. + /// @dev Make a call to the zkChain diamond contract with the same calldata. /// Note: If the batch is reverted, it needs to be committed first before the execution. /// So it's safe to not override the committed batches. function revertBatchesSharedBridge(uint256 _chainId, uint256) external onlyValidator(_chainId) { - _propagateToZkSyncHyperchain(_chainId); - } - - /// @dev Make a call to the hyperchain diamond contract with the same calldata. 
- /// Note: We don't track the time when batches are proven, since all information about - /// the batch is known on the commit stage and the proved is not finalized (may be reverted). - function proveBatches( - StoredBatchInfo calldata, - StoredBatchInfo[] calldata, - ProofInput calldata - ) external onlyValidator(ERA_CHAIN_ID) { - _propagateToZkSyncHyperchain(ERA_CHAIN_ID); + _propagateToZKChain(_chainId); } - /// @dev Make a call to the hyperchain diamond contract with the same calldata. + /// @dev Make a call to the zkChain diamond contract with the same calldata. /// Note: We don't track the time when batches are proven, since all information about /// the batch is known on the commit stage and the proved is not finalized (may be reverted). function proveBatchesSharedBridge( uint256 _chainId, - StoredBatchInfo calldata, - StoredBatchInfo[] calldata, - ProofInput calldata + uint256, // _processBatchFrom + uint256, // _processBatchTo + bytes calldata ) external onlyValidator(_chainId) { - _propagateToZkSyncHyperchain(_chainId); + _propagateToZKChain(_chainId); } /// @dev Check that batches were committed at least X time ago and - /// make a call to the hyperchain diamond contract with the same calldata. - function executeBatches(StoredBatchInfo[] calldata _newBatchesData) external onlyValidator(ERA_CHAIN_ID) { - _executeBatchesInner(ERA_CHAIN_ID, _newBatchesData); - } - - /// @dev Check that batches were committed at least X time ago and - /// make a call to the hyperchain diamond contract with the same calldata. + /// make a call to the zkChain diamond contract with the same calldata. 
function executeBatchesSharedBridge( uint256 _chainId, - StoredBatchInfo[] calldata _newBatchesData + uint256 _processBatchFrom, + uint256 _processBatchTo, + bytes calldata ) external onlyValidator(_chainId) { - _executeBatchesInner(_chainId, _newBatchesData); - } - - function _executeBatchesInner(uint256 _chainId, StoredBatchInfo[] calldata _newBatchesData) internal { uint256 delay = executionDelay; // uint32 unchecked { - for (uint256 i = 0; i < _newBatchesData.length; ++i) { - uint256 commitBatchTimestamp = committedBatchTimestamp[_chainId].get(_newBatchesData[i].batchNumber); + // We disable this check because calldata array length is cheap. + for (uint256 i = _processBatchFrom; i <= _processBatchTo; ++i) { + uint256 commitBatchTimestamp = committedBatchTimestamp[_chainId].get(i); // Note: if the `commitBatchTimestamp` is zero, that means either: // * The batch was committed, but not through this contract. - // * The batch wasn't committed at all, so execution will fail in the zkSync contract. + // * The batch wasn't committed at all, so execution will fail in the ZKsync contract. // We allow executing such batches. - require(block.timestamp >= commitBatchTimestamp + delay, "5c"); // The delay is not passed + if (block.timestamp < commitBatchTimestamp + delay) { + revert TimeNotReached(commitBatchTimestamp + delay, block.timestamp); + } } } - _propagateToZkSyncHyperchain(_chainId); + _propagateToZKChain(_chainId); } - /// @dev Call the hyperchain diamond contract with the same calldata as this contract was called. - /// Note: it is called the hyperchain diamond contract, not delegatecalled! - function _propagateToZkSyncHyperchain(uint256 _chainId) internal { - address contractAddress = stateTransitionManager.getHyperchain(_chainId); + /// @dev Call the zkChain diamond contract with the same calldata as this contract was called. + /// Note: it is called the zkChain diamond contract, not delegatecalled! 
+ function _propagateToZKChain(uint256 _chainId) internal { + address contractAddress = chainTypeManager.getZKChain(_chainId); assembly { // Copy function signature and arguments from calldata at zero position into memory at pointer position calldatacopy(0, 0, calldatasize()) - // Call method of the hyperchain diamond contract returns 0 on error + // Call method of the ZK chain diamond contract returns 0 on error let result := call(gas(), contractAddress, 0, 0, calldatasize(), 0, 0) // Get the size of the last return data let size := returndatasize() diff --git a/l1-contracts/contracts/state-transition/Verifier.sol b/l1-contracts/contracts/state-transition/Verifier.sol index 922f21ca3..da8677ad5 100644 --- a/l1-contracts/contracts/state-transition/Verifier.sol +++ b/l1-contracts/contracts/state-transition/Verifier.sol @@ -8,13 +8,14 @@ import {IVerifier} from "./chain-interfaces/IVerifier.sol"; /// @author Matter Labs /// @notice Modified version of the Permutations over Lagrange-bases for Oecumenical Noninteractive arguments of /// Knowledge (PLONK) verifier. -/// Modifications have been made to optimize the proof system for zkSync hyperchain circuits. +/// Modifications have been made to optimize the proof system for ZK chain circuits. +/// @dev Contract was generated from a verification key with a hash of 0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2 /// @dev It uses a custom memory layout inside the inline assembly block. Each reserved memory cell is declared in the /// constants below. 
/// @dev For a better understanding of the verifier algorithm please refer to the following papers: /// * Original Plonk Article: https://eprint.iacr.org/2019/953.pdf /// * Original LookUp Article: https://eprint.iacr.org/2020/315.pdf -/// * Plonk for zkSync v1.1: https://github.com/matter-labs/solidity_plonk_verifier/raw/recursive/bellman_vk_codegen_recursive/RecursivePlonkUnrolledForEthereum.pdf +/// * Plonk for ZKsync v1.1: https://github.com/matter-labs/solidity_plonk_verifier/raw/recursive/bellman_vk_codegen_recursive/RecursivePlonkUnrolledForEthereum.pdf /// The notation used in the code is the same as in the papers. /* solhint-enable max-line-length */ contract Verifier is IVerifier { @@ -341,10 +342,9 @@ contract Verifier is IVerifier { /// @inheritdoc IVerifier function verify( - uint256[] calldata, // _publicInputs - uint256[] calldata, // _proof - uint256[] calldata // _recursiveAggregationInput - ) public view virtual returns (bool) { + uint256[] calldata _publicInputs, + uint256[] calldata _proof + ) external view virtual returns (bool) { // No memory was accessed yet, so keys can be loaded into the right place and not corrupt any other memory. _loadVerificationKey(); @@ -522,7 +522,17 @@ contract Verifier is IVerifier { // 2. Load the proof (except for the recursive part) offset := calldataload(0x24) let proofLengthInWords := calldataload(add(offset, 0x04)) - isValid := and(eq(proofLengthInWords, 44), isValid) + + // Check the proof length depending on whether the recursive part is present + let expectedProofLength + switch mload(VK_RECURSIVE_FLAG_SLOT) + case 0 { + expectedProofLength := 44 + } + default { + expectedProofLength := 48 + } + isValid := and(eq(proofLengthInWords, expectedProofLength), isValid) // PROOF_STATE_POLYS_0 { @@ -669,21 +679,13 @@ contract Verifier is IVerifier { } // 3. 
Load the recursive part of the proof - offset := calldataload(0x44) - let recursiveProofLengthInWords := calldataload(add(offset, 0x04)) - - switch mload(VK_RECURSIVE_FLAG_SLOT) - case 0 { - // recursive part should be empty - isValid := and(iszero(recursiveProofLengthInWords), isValid) - } - default { + if mload(VK_RECURSIVE_FLAG_SLOT) { // recursive part should be consist of 2 points - isValid := and(eq(recursiveProofLengthInWords, 4), isValid) + // PROOF_RECURSIVE_PART_P1 { - let x := mod(calldataload(add(offset, 0x024)), Q_MOD) - let y := mod(calldataload(add(offset, 0x044)), Q_MOD) + let x := mod(calldataload(add(offset, 0x5a4)), Q_MOD) + let y := mod(calldataload(add(offset, 0x5c4)), Q_MOD) let xx := mulmod(x, x, Q_MOD) isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) mstore(PROOF_RECURSIVE_PART_P1_X_SLOT, x) @@ -691,8 +693,8 @@ contract Verifier is IVerifier { } // PROOF_RECURSIVE_PART_P2 { - let x := mod(calldataload(add(offset, 0x064)), Q_MOD) - let y := mod(calldataload(add(offset, 0x084)), Q_MOD) + let x := mod(calldataload(add(offset, 0x5e4)), Q_MOD) + let y := mod(calldataload(add(offset, 0x604)), Q_MOD) let xx := mulmod(x, x, Q_MOD) isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) mstore(PROOF_RECURSIVE_PART_P2_X_SLOT, x) diff --git a/l1-contracts/contracts/state-transition/VerifierFflonk.sol b/l1-contracts/contracts/state-transition/VerifierFflonk.sol new file mode 100644 index 000000000..1c9103395 --- /dev/null +++ b/l1-contracts/contracts/state-transition/VerifierFflonk.sol @@ -0,0 +1,1026 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IVerifier} from "./chain-interfaces/IVerifier.sol"; + +/// @title Fflonk Verifier Implementation +/// @author Matter Labs +/// @notice FFT inspired version of PlonK to optimize on-chain gas cost +/// @dev For better understanding of the protocol follow the below papers: +/// * Fflonk Paper: 
https://eprint.iacr.org/2021/1167 +/// @custom:security-contact security@matterlabs.dev +contract VerifierFflonk is IVerifier{ +// ================Constants================ +uint32 internal constant DST_0 = 0; +uint32 internal constant DST_1 = 1; +uint32 internal constant DST_CHALLENGE = 2; +uint256 internal constant FR_MASK = 0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff; +uint256 internal constant Q_MOD = 21888242871839275222246405745257275088696311157297823662689037894645226208583; +uint256 internal constant R_MOD = 21888242871839275222246405745257275088548364400416034343698204186575808495617; +uint256 internal constant BN254_B_COEFF = 3; + +// ================Verification Key================ +uint256 internal constant VK_NUM_INPUTS = 1; +// [C0]1 = qL(X^8)+ X*qR(X^8)+ X^2*qO(X^8)+ X^3*qM(X^8)+ X^4*qC(X^8)+ X^5*Sσ1(X^8)+ X^6*Sσ2(X^8)+ X^7*Sσ3(X^8) +uint256 internal constant VK_C0_G1_X = 0x15c99dbc62b8191204ff93984b0de4fb7c79ac7a1ef2c94f4ce940319a2408b2; +uint256 internal constant VK_C0_G1_Y = 0x0521b86a104e07c8971bf2e17d7665d59df7566c08e6e0c9750f584bb24084ce; +// k1 = 5, k2 = 7 +uint256 internal constant VK_NON_RESIDUES_0 = 0x0000000000000000000000000000000000000000000000000000000000000005; +uint256 internal constant VK_NON_RESIDUES_1 = 0x0000000000000000000000000000000000000000000000000000000000000007; +// G2 Elements = [1]_2, [s]_2 +uint256 internal constant VK_G2_ELEMENT_0_X1 = 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2; +uint256 internal constant VK_G2_ELEMENT_0_X2 = 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed; +uint256 internal constant VK_G2_ELEMENT_0_Y1 = 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b; +uint256 internal constant VK_G2_ELEMENT_0_Y2 = 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa; +uint256 internal constant VK_G2_ELEMENT_1_X1 = 0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1; +uint256 internal constant 
VK_G2_ELEMENT_1_X2 = 0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0; +uint256 internal constant VK_G2_ELEMENT_1_Y1 = 0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4; +uint256 internal constant VK_G2_ELEMENT_1_Y2 = 0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55; + +// Memory slots from 0x000 to 0x200 are reserved for intermediate computations and call to precompiles. + +// ================Transcript================ +// ================Constants================ +uint256 internal constant ONE = 1; +uint256 internal constant DOMAIN_SIZE = 8388608; +uint256 internal constant OMEGA = 0x1283ba6f4b7b1a76ba2008fe823128bea4adb9269cbfd7c41c223be65bc60863; +// ========================================= +uint256 internal constant TRANSCRIPT_BEGIN_SLOT = 0x200; +uint256 internal constant TRANSCRIPT_DST_BYTE_SLOT = 0x203; +uint256 internal constant TRANSCRIPT_STATE_0_SLOT = 0x204; +uint256 internal constant TRANSCRIPT_STATE_1_SLOT = 0x224; +uint256 internal constant TRANSCRIPT_CHALLENGE_SLOT = 0x244; + +// ================PartialVerifierState================ +// copy-permutation challenges +uint256 internal constant PVS_BETA = 0x264 + 0x00; +uint256 internal constant PVS_GAMMA = 0x264 + 0x20; +// evaluation challenges +uint256 internal constant PVS_R = 0x264 + 0x40; +uint256 internal constant PVS_Z = 0x264 + 0x60; +uint256 internal constant PVS_Z_OMEGA = 0x264 + 0x80; +// aggregation challenge +uint256 internal constant PVS_ALPHA_0 = 0x264 + 0xa0; +uint256 internal constant PVS_ALPHA_1 = 0x264 + 0xc0; +// final evaluation challenge +uint256 internal constant PVS_Y = 0x264 + 0xe0; +// convenience +uint256 internal constant PVS_VANISHING_AT_Z = 0x264 + 0x100; +uint256 internal constant PVS_VANISHING_AT_Z_INV = 0x264 + 0x120; +uint256 internal constant PVS_L_0_AT_Z = 0x264 + 0x140; +uint256 internal constant MAIN_GATE_QUOTIENT_AT_Z = 0x264 + 0x160; +uint256 internal constant COPY_PERM_FIRST_QUOTIENT_AT_Z = 0x264 + 0x180; 
+uint256 internal constant COPY_PERM_SECOND_QUOTIENT_AT_Z = 0x264 + 0x1a0; +// ================Opening State================ +// h0, h1, h2, h2_shifted +uint256 internal constant OPS_OPENING_POINTS = 0x264 + 0x1c0 + 0x00; // 4 slots +uint256 internal constant OPS_Y_POWS = 0x264 + 0x1c0 + 0x80; // 9 SLOTS + +// ================Pairing State================ + +uint256 internal constant PS_VANISHING_AT_Y = 0x264 + 0x1c0 + 0x1a0; +uint256 internal constant PS_INV_ZTS0_AT_Y = 0x264 + 0x1c0 + 0x1c0; +uint256 internal constant PS_SET_DIFFERENCES_AT_Y = 0x264 + 0x1c0 + 0x1e0; // 3 slots +uint256 internal constant PS_MINUS_Z = 0x264 + 0x1c0 + 0x240; // 2 slots +uint256 internal constant PS_R_EVALS = 0x264 + 0x1c0 + 0x280; // 3 slots + +// ================In Memory(from Proof)================ +uint256 internal constant MEM_PROOF_PUBLIC_INPUT_SLOT = 0x264 + 0x1c0 + 0x2e0; + +uint256 internal constant MEM_PROOF_COMMITMENT_0_G1_X = 0x264 + 0x1c0 + 0x2e0 + 0x20; +uint256 internal constant MEM_PROOF_COMMITMENT_0_G1_Y = 0x264 + 0x1c0 + 0x2e0 + 0x40; +uint256 internal constant MEM_PROOF_COMMITMENT_1_G1_X = 0x264 + 0x1c0 + 0x2e0 + 0x60; +uint256 internal constant MEM_PROOF_COMMITMENT_1_G1_Y = 0x264 + 0x1c0 + 0x2e0 + 0x80; +uint256 internal constant MEM_PROOF_COMMITMENT_2_G1_X = 0x264 + 0x1c0 + 0x2e0 + 0xa0; +uint256 internal constant MEM_PROOF_COMMITMENT_2_G1_Y = 0x264 + 0x1c0 + 0x2e0 + 0xc0; +uint256 internal constant MEM_PROOF_COMMITMENT_3_G1_X = 0x264 + 0x1c0 + 0x2e0 + 0xe0; +uint256 internal constant MEM_PROOF_COMMITMENT_3_G1_Y = 0x264 + 0x1c0 + 0x2e0 + 0x100; + +uint256 internal constant MEM_PROOF_EVALUATIONS = 0x264 + 0x1c0 + 0x2e0 + 0x120; // 15 slots + +uint256 internal constant MEM_PROOF_MONTGOMERY_LAGRANGE_BASIS_INVERSE = 0x264 + 0x1c0 + 0x2e0 + 0x120 + 0x1e0; // 1 slots + +uint256 internal constant MEM_LAGRANGE_BASIS_DENOMS = 0x264 + 0x1c0 + 0x2e0 + 0x120 + 0x200; //18 slots +uint256 internal constant MEM_LAGRANGE_BASIS_DENOM_PRODUCTS = 0x264 + 0x1c0 + 0x2e0 + 0x120 + 
0x440; // 18 slots +uint256 internal constant MEM_PROOF_LAGRANGE_BASIS_EVALS = 0x264 + 0x1c0 + 0x2e0 + 0x120 + 0x680; // 18 Slots + +// ================Constants================ +uint256 internal constant PROOF_PUBLIC_INPUTS_LENGTH = 1; +uint256 internal constant PROOF_LENGTH = 24; +uint256 internal constant PROOF_EVALUATIONS_LENGTH = 15; +uint256 internal constant TOTAL_LAGRANGE_BASIS_INVERSES_LENGTH = 18; + + +/// @inheritdoc IVerifier +function verificationKeyHash() external pure returns (bytes32 vkHash) { + return keccak256(abi.encodePacked( + VK_NUM_INPUTS, + VK_C0_G1_X, + VK_C0_G1_Y, + VK_NON_RESIDUES_0, + VK_NON_RESIDUES_1, + VK_G2_ELEMENT_0_X1, + VK_G2_ELEMENT_0_X2, + VK_G2_ELEMENT_0_Y1, + VK_G2_ELEMENT_0_Y2, + VK_G2_ELEMENT_1_X1, + VK_G2_ELEMENT_1_X2, + VK_G2_ELEMENT_1_Y1, + VK_G2_ELEMENT_1_Y2 + )); +} + +/// @inheritdoc IVerifier +function verify( + uint256[] calldata _publicInputs, + uint256[] calldata _proof +) external view returns(bool) { + // Beginning of the big inline assembly block that makes all the verification work. + // Note: We use the custom memory layout, so the return value should be returned from the assembly, not + // Solidity code. + assembly { + // load public inputs and proof from the calldata + load_inputs() + initialize_transcript() + // identities at verifier's point + compute_main_gate_quotient() + compute_copy_permutation_quotients() + // openings + initialize_opening_state() + // final pairing + let result := check_openings() + mstore(0, result) + return(0, 0x20) + + function load_inputs() { + // 1. Load public inputs + let publicInputOffset := calldataload(0x04) + let publicInputLengthInWords := calldataload(add(publicInputOffset, 0x04)) + // We expect only one public input + if iszero(eq(publicInputLengthInWords, PROOF_PUBLIC_INPUTS_LENGTH)) { + revertWithMessage(32, "public input length is incorrect") + } + mstore(MEM_PROOF_PUBLIC_INPUT_SLOT, mod(calldataload(add(publicInputOffset, 0x24)), R_MOD)) + + // 2. 
Load proof + let proofLengthOffset := calldataload(0x24) + let proofLengthInWords := calldataload(add(proofLengthOffset, 0x04)) + + if iszero(eq(proofLengthInWords, PROOF_LENGTH)) { + revertWithMessage(25, "proof length is incorrect") + } + let proofOffset := add(proofLengthOffset, 0x24) + // Note: We don't accept the point-at-infinity as a valid input for the commitments considering the security risks involved, + // as it may aid in proof manipulation and final pairing computation. + { + let x := mod(calldataload(proofOffset), Q_MOD) + let y := mod(calldataload(add(proofOffset, 0x20)), Q_MOD) + let xx := mulmod(x, x, Q_MOD) + if iszero(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD))) { + revertWithMessage(28, "commitment 0 is not on curve") + } + mstore(MEM_PROOF_COMMITMENT_0_G1_Y, y) + mstore(MEM_PROOF_COMMITMENT_0_G1_X, x) + } + { + let x := mod(calldataload(add(proofOffset, 0x40)), Q_MOD) + let y := mod(calldataload(add(proofOffset, 0x60)), Q_MOD) + let xx := mulmod(x, x, Q_MOD) + if iszero(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD))) { + revertWithMessage(28, "commitment 1 is not on curve") + } + mstore(MEM_PROOF_COMMITMENT_1_G1_Y, y) + mstore(MEM_PROOF_COMMITMENT_1_G1_X, x) + } + { + let x := mod(calldataload(add(proofOffset, 0x80)), Q_MOD) + let y := mod(calldataload(add(proofOffset, 0xa0)), Q_MOD) + let xx := mulmod(x, x, Q_MOD) + if iszero(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD))) { + revertWithMessage(28, "commitment 2 is not on curve") + } + mstore(MEM_PROOF_COMMITMENT_2_G1_Y, y) + mstore(MEM_PROOF_COMMITMENT_2_G1_X, x) + } + { + let x := mod(calldataload(add(proofOffset, 0xc0)), Q_MOD) + let y := mod(calldataload(add(proofOffset, 0xe0)), Q_MOD) + let xx := mulmod(x, x, Q_MOD) + if iszero(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD))) { + revertWithMessage(28, "commitment 3 is not on curve") + } + mstore(MEM_PROOF_COMMITMENT_3_G1_Y, y) + mstore(MEM_PROOF_COMMITMENT_3_G1_X, x) + 
} + proofOffset := add(proofOffset, 0x100) + + for {let i:=0} lt(i, PROOF_EVALUATIONS_LENGTH) {i:=add(i,1)} { + let eval := mod(calldataload(add(proofOffset, mul(i, 0x20))), R_MOD) + let slot := add(MEM_PROOF_EVALUATIONS, mul(i, 0x20)) + mstore(slot, eval) + } + proofOffset := add(proofOffset, mul(PROOF_EVALUATIONS_LENGTH, 0x20)) + + mstore(MEM_PROOF_MONTGOMERY_LAGRANGE_BASIS_INVERSE, mod(calldataload(proofOffset), R_MOD)) + } + + /** + * @dev Commits data in the transcript then gets the challenges + * @notice that at this point, the transcript only has public inputs + * But luckily prover doesn't need any randomness in the first round + * so that prover has no control over the values because quotients are + * separated(there is no quotient aggregation neither in this round nor all rounds) + * + * w = 0x1283ba6f4b7b1a76ba2008fe823128bea4adb9269cbfd7c41c223be65bc60863 + */ + function initialize_transcript() { + if iszero(lt(DOMAIN_SIZE, R_MOD)){ + revertWithMessage(26, "Domain size >= R_MOD [ITS]") + } + if iszero(lt(OMEGA, R_MOD)){ + revertWithMessage(20, "Omega >= R_MOD [ITS]") + } + for {let i:=0} lt(i,VK_NUM_INPUTS) {i:=add(i,1)}{ + update_transcript(mload(add(MEM_PROOF_PUBLIC_INPUT_SLOT, mul(i,0x20)))) + } + // commit first round commitment: preprocessed polynomials + update_transcript(VK_C0_G1_X) + update_transcript(VK_C0_G1_Y) + + // commit second round commitment: witnesses and gate identities + update_transcript(mload(MEM_PROOF_COMMITMENT_0_G1_X)) + update_transcript(mload(MEM_PROOF_COMMITMENT_0_G1_Y)) + + // copy-permutation challenges + mstore(PVS_BETA, get_challenge(0)) + mstore(PVS_GAMMA, get_challenge(1)) + // commit third round commitment: copy-perm + update_transcript(mload(MEM_PROOF_COMMITMENT_1_G1_X)) + update_transcript(mload(MEM_PROOF_COMMITMENT_1_G1_Y)) + // get evaluation challenge + // all system polynomials will be evaluated at z + // then combined polynomials will be opened at h_i = r^power_i + // then it becomes e.g C_i(X) = f_0(x^2) + 
x*f(x^2) in case of two polynomials + mstore(PVS_R, get_challenge(2)) + // commit all evaluations + for {let i:=0} lt(i, PROOF_EVALUATIONS_LENGTH) {i:=add(i,1)}{ + update_transcript(mload(add(MEM_PROOF_EVALUATIONS, mul(i,0x20)))) + } + // get aggregation challenge + mstore(PVS_ALPHA_0, get_challenge(3)) + mstore(PVS_ALPHA_1, mulmod(mload(PVS_ALPHA_0), mload(PVS_ALPHA_0), R_MOD)) + // commit w(X) + update_transcript(mload(MEM_PROOF_COMMITMENT_2_G1_X)) + update_transcript(mload(MEM_PROOF_COMMITMENT_2_G1_Y)) + // opening challenge + mstore(PVS_Y, get_challenge(4)) + mstore(PVS_Z, modexp(mload(PVS_R), 24)) + // grand product of copy-permutation needs to be opened at shifted position + mstore(PVS_Z_OMEGA, mulmod(mload(PVS_Z), OMEGA, R_MOD)) + // Z_h(z) = X^N - 1 + mstore(PVS_VANISHING_AT_Z, addmod(modexp(mload(PVS_Z), DOMAIN_SIZE), sub(R_MOD,ONE) ,R_MOD)) + // L0(z) = 1/(N*(X-1)) * (X^N - 1) + mstore(PVS_L_0_AT_Z, modexp(mulmod(addmod(mload(PVS_Z), sub(R_MOD, ONE), R_MOD), DOMAIN_SIZE, R_MOD), sub(R_MOD,2))) + mstore(PVS_L_0_AT_Z, mulmod(mload(PVS_L_0_AT_Z), mload(PVS_VANISHING_AT_Z), R_MOD)) + mstore(PVS_VANISHING_AT_Z_INV, modexp(mload(PVS_VANISHING_AT_Z), sub(R_MOD,2))) + } + + /** + * @dev Computes main gate quotient T0(ζ) + * T0(ζ) = (qm(ζ)*a(ζ)*b(ζ) + qa(ζ)*a(ζ) + qb(ζ)*b(ζ) + qc(ζ)*c(ζ) + qconst(ζ) + PI*L0(ζ)) * ZH(ζ)^-1 + */ + function compute_main_gate_quotient() { + // q_const + let rhs := mload(add(MEM_PROOF_EVALUATIONS, mul(4, 0x20))) + rhs := addmod(rhs, mulmod(mload(PVS_L_0_AT_Z), mload(MEM_PROOF_PUBLIC_INPUT_SLOT), R_MOD), R_MOD) + for {let i:=0} lt(i,3) {i := add(i,1)}{ + rhs := addmod(rhs, mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(i, 0x20))), mload(add(MEM_PROOF_EVALUATIONS, mul(add(8, i), 0x20))), R_MOD), R_MOD) + } + // q_m*A*B + rhs := mulmod(addmod(rhs, mulmod(mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(3, 0x20))), mload(add(MEM_PROOF_EVALUATIONS, mul(8, 0x20))), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(9, 0x20))), R_MOD), R_MOD), 
mload(PVS_VANISHING_AT_Z_INV), R_MOD) + mstore(MAIN_GATE_QUOTIENT_AT_Z, rhs) + } + + /** + * @dev Computes copy permutation quotients T1(ζ) & T2(ζ) + * T1(ζ) = ((z(ζ) * (a(ζ)+β*ζ+γ) * (b(ζ)+k1*β*ζ+γ) * (c(ζ)+k2*β*ζ+γ)) + * −(z(ζω) * (a(ζ)+β*sσ1(ζ)+γ) * (b(ζ)+β*sσ2(ζ)+γ) * (c(ζ)+β*sσ3(ζ)+γ)) * ZH(ζ)^-1 + * T2(ζ) = (z(ζ)−1)*L0(ζ)*ZH(ζ)^-1 + */ + function compute_copy_permutation_quotients() { + let tmp + let tmp2 + // (c(ζ)+k2*β*ζ+γ) + let rhs := addmod(addmod(mulmod(mulmod(mload(PVS_BETA), mload(PVS_Z), R_MOD), VK_NON_RESIDUES_1, R_MOD), mload(PVS_GAMMA), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(add(8,2), 0x20))), R_MOD) + // (b(ζ)+k1*β*ζ+γ) + tmp := addmod(addmod(mulmod(mulmod(mload(PVS_BETA), mload(PVS_Z), R_MOD), VK_NON_RESIDUES_0, R_MOD), mload(PVS_GAMMA), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(add(8,1), 0x20))), R_MOD) + // (b(ζ)+k1*β*ζ+γ) * (c(ζ)+k2*β*ζ+γ) + rhs := mulmod(rhs, tmp, R_MOD) + // (z(ζ) * (a(ζ)+β*ζ+γ) * (b(ζ)+k1*β*ζ+γ) * (c(ζ)+k2*β*ζ+γ) + rhs := mulmod(mulmod(rhs, addmod(addmod(mulmod(mload(PVS_BETA), mload(PVS_Z), R_MOD), mload(PVS_GAMMA), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(8, 0x20))), R_MOD), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(11, 0x20))), R_MOD) + + // (z(ζω) * (b(ζ)+β*sσ2(ζ)+γ) * (c(ζ)+β*sσ3(ζ)+γ)) + tmp2 := mulmod(mulmod(addmod(addmod(mulmod(mload(PVS_BETA), mload(add(MEM_PROOF_EVALUATIONS, mul(add(5,2), 0x20))), R_MOD), mload(PVS_GAMMA), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(add(8,2), 0x20))), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(12, 0x20))), R_MOD), addmod(addmod(mulmod(mload(PVS_BETA), mload(add(MEM_PROOF_EVALUATIONS, mul(add(5,1), 0x20))), R_MOD), mload(PVS_GAMMA), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(add(8,1), 0x20))), R_MOD), R_MOD) + // (a(ζ)+β*sσ1(ζ)+γ) + tmp := addmod(addmod(mulmod(mload(PVS_BETA), mload(add(MEM_PROOF_EVALUATIONS, mul(5, 0x20))), R_MOD), mload(PVS_GAMMA), R_MOD), mload(add(MEM_PROOF_EVALUATIONS, mul(8, 0x20))), R_MOD) + // z(ζω) * (a(ζ)+β*sσ1(ζ)+γ) * 
(b(ζ)+β*sσ2(ζ)+γ) * (c(ζ)+β*sσ3(ζ)+γ) + tmp2 := mulmod(tmp2, tmp, R_MOD) + // −(z(ζω) * (a(ζ)+β*sσ1(ζ)+γ) * (b(ζ)+β*sσ2(ζ)+γ) * (c(ζ)+β*sσ3(ζ)+γ)) + tmp2 := sub(R_MOD,tmp2) + // ((z(ζ) * (a(ζ)+β*ζ+γ) * (b(ζ)+k1*β*ζ+γ) * (c(ζ)+k2*β*ζ+γ)) − (z(ζω) * (a(ζ)+β*sσ1(ζ)+γ) * (b(ζ)+β*sσ2(ζ)+γ) * (c(ζ)+β*sσ3(ζ)+γ)) * ZH(ζ)^-1 + rhs := mulmod(addmod(rhs, tmp2, R_MOD), mload(PVS_VANISHING_AT_Z_INV), R_MOD) + mstore(COPY_PERM_FIRST_QUOTIENT_AT_Z, rhs) + + // (z(ζ)−1)*L0(ζ)*ZH(ζ)^-1 + rhs := mulmod(mulmod(addmod(mload(add(MEM_PROOF_EVALUATIONS, mul(11, 0x20))), sub(R_MOD, 1), R_MOD), mload(PVS_L_0_AT_Z), R_MOD), mload(PVS_VANISHING_AT_Z_INV), R_MOD) + mstore(COPY_PERM_SECOND_QUOTIENT_AT_Z, rhs) + } + + /** + * @dev Computes partial lagrange basis evaluations Li(y)_numerator {i = [start..(start+num_polys))} using montgomery lagrange basis inverses sent with proof. + * Li(y)_numerator = (w_i * (y^{num_polys} - h^{num_polys})) + * Li(y)_denominator = (num_polys * h^{num_polys-1} * (y - (h * w_i))) + * Li(y) = Li(y)_numerator / Li(y)_denominator = (w_i * (y^{num_polys} - h^{num_polys})) / (num_polys * h^{num_polys-1} * (y - (h * w_i))) + * + * Also calculates the products of the denominators of the lagrange basis evaluations: + * Li(y)_denominators_product = Li(y)_previous_denominators_product * (∏(Li(y)_denominator {i = [start..(start+num_polys))})) + */ + function precompute_partial_lagrange_basis_evaluations(start, num_polys, y, omega, h, product) -> interim_product { + if gt(add(start, num_polys), TOTAL_LAGRANGE_BASIS_INVERSES_LENGTH){ + revertWithMessage(31, "Precompute Eval. 
Error [PLBEI1]") + } + let tmp := h + let loop_length := sub(num_polys,2) + // h^{num_polys-1} + for {let i:=0} lt(i,loop_length) {i := add(i,1)}{ + tmp := mulmod(tmp, h, R_MOD) + } + // num_polys * h^{num_polys-1} + let constant_part := mulmod(num_polys, tmp, R_MOD) + + // y^{num_polys} + let y_pow := mload(add(OPS_Y_POWS, mul(num_polys, 0x20))) + // h^{num_polys} + let num_at_y := mulmod(tmp, h, R_MOD) + // -h^{num_polys} + num_at_y := sub(R_MOD, num_at_y) + // (y^{num_polys} - h^{num_polys}) + num_at_y := addmod(num_at_y, y_pow, R_MOD) + + let current_omega := 1 + for {let i:=0} lt(i, num_polys) {i := add(i,1)}{ + // h*w_i + tmp := mulmod(current_omega, h, R_MOD) + // -h*w_i + tmp := sub(R_MOD, tmp) + // y-(h*w_i) + tmp := addmod(tmp, y, R_MOD) + // (num_polys * h^{num_polys-1} * (y - (h * w_i))) + tmp := mulmod(tmp, constant_part, R_MOD) + + mstore(add(MEM_LAGRANGE_BASIS_DENOMS, mul(add(start, i), 0x20)), tmp) + + product := mulmod(product, tmp, R_MOD) + + mstore(add(MEM_LAGRANGE_BASIS_DENOM_PRODUCTS, mul(add(start, i), 0x20)), product) + // Li(y) = (W_i * (y^{num_polys} - h^{num_polys})) + mstore(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(start, i), 0x20)), mulmod(num_at_y, current_omega, R_MOD)) + + // w_i {i = i+1} + current_omega := mulmod(current_omega, omega, R_MOD) + } + + interim_product := product + } + + /** + * @dev Computes partial lagrange basis evaluations Li(y)_numerator = {i = [start..(start+num_polys))} & Li(y)_numerator {i = [(start+num_polys)..(start+(2*num_polys)))} using montgomery lagrange basis inverses sent with proof. 
+ * For Li(y)_numerator{i = [start..(start+num_polys))}: + * Li(y)_numerator = w_i * (y^{2*num_polys} + (h^{num_polys} * h_s^{num_polys}) - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys}))) + * Li(y)_denominator = (num_polys * (h^{(2*num_polys)-1}-(h^{num_polys-1} * h_s^{num_polys})) * (y-(h*w_i))) + * Li(y) = Li(y)_numerator / Li(y)_denominator = (w_i * (y^{2*num_polys} + (h^{num_polys} * h_s^{num_polys}) - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys})))) / (num_polys * (h^{(2*num_polys)-1}-(h^{num_polys-1} * h_s^{num_polys})) * (y-(h*w_i))) + * + * For Li(y)_numerator{i = [(start+num_polys)..(start+(2*num_polys)))} + * Li(y)_numerator = w_i * (y^{2*num_polys} + (h^{num_polys} * h_s^{num_polys}) - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys}))) + * Li(y)_denominator = (num_polys * (h_s^{(2*num_polys)-1}-(h_s^{num_polys-1} * h^{num_polys})) * (y-(h_s*w_i))) + * Li(y) = Li(y)_numerator / Li(y)_denominator = (w_i * (y^{2*num_polys} + (h^{num_polys} * h_s^{num_polys}) - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys}))) ) / (num_polys * (h_s^{(2*num_polys)-1}-(h_s^{num_polys-1} * h^{num_polys})) * (y-(h_s*w_i))) + * + * Also calculates the products of the denominators of the lagrange basis evaluations: + * Li(y)_denominators_product = Li(y)_previous_denominators_product * (∏(Li(y)_denominator {i = [start..(start+num_polys))})) * (∏(Li(y)_denominator {i = [(start+num_polys)..(start+(2*num_polys)))})) + */ + + function precompute_partial_lagrange_basis_evaluations_for_union_set(start, num_polys, y, omega, h, h_shifted, product) -> final_product{ + if gt(add(start, mul(2,num_polys)), TOTAL_LAGRANGE_BASIS_INVERSES_LENGTH){ + revertWithMessage(32, "Precompute Eval. 
Error [PLBEIU1]") + } + let h_pows_0 := h + let h_pows_1 := h_shifted + let loop_length := sub(num_polys,2) + // h^{num_polys-1} & h_s^{num_polys-1} + for {let i:=0} lt(i, loop_length) {i := add(i,1)}{ + h_pows_0 := mulmod(h_pows_0, h, R_MOD) + h_pows_1 := mulmod(h_pows_1, h_shifted, R_MOD) + } + let constant_parts_0 := h_pows_0 + let constant_parts_1 := h_pows_1 + // h^{num_polys} + h_pows_0 := mulmod(h_pows_0, h, R_MOD) + // h_s^{num_polys} + h_pows_1 := mulmod(h_pows_1, h_shifted, R_MOD) + + // h^{num_polys-1} * h_s^{num_polys} + constant_parts_0 := mulmod(constant_parts_0, h_pows_1, R_MOD) + // -h^{num_polys-1} * h_s^{num_polys} + constant_parts_0 := sub(R_MOD, constant_parts_0) + // h_s^{num_polys-1} * h^{num_polys} + constant_parts_1 := mulmod(constant_parts_1, h_pows_0, R_MOD) + // -h_s^{num_polys-1} * h^{num_polys} + constant_parts_1 := sub(R_MOD, constant_parts_1) + + // y^{num_polys} + let t_2 := mload(add(OPS_Y_POWS, mul(num_polys, 0x20))) + // h^{num_polys} * h_s^{num_polys} + let t_1 := mulmod(h_pows_0, h_pows_1, R_MOD) + // h^{num_polys} + h_s^{num_polys} + let t_0 := addmod(h_pows_0, h_pows_1, R_MOD) + // y^{num_polys} * (h^{num_polys} + h_s^{num_polys}) + t_0 := mulmod(t_0, t_2, R_MOD) + // - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys})) + t_0 := sub(R_MOD, t_0) + // h^{num_polys} * h_s^{num_polys} - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys})) + t_1 := addmod(t_1, t_0, R_MOD) + // y^{2*num_polys} + t_2 := mulmod(t_2, t_2, R_MOD) + // y^{2*num_polys} + (h^{num_polys} * h_s^{num_polys}) - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys})) + t_1 := addmod(t_1, t_2, R_MOD) + loop_length := sub(num_polys,1) + // h^{(2*num_polys)-1} & h_s^{(2*num_polys)-1} + for {let i:=0} lt(i, loop_length) {i := add(i,1)}{ + h_pows_0 := mulmod(h_pows_0, h, R_MOD) + h_pows_1 := mulmod(h_pows_1, h_shifted, R_MOD) + } + // h^{(2*num_polys)-1}-(h^{num_polys-1} * h_s^{num_polys}) + constant_parts_0 := addmod(constant_parts_0, h_pows_0, R_MOD) + // num_polys * 
(h^{(2*num_polys)-1}-(h^{num_polys-1} * h_s^{num_polys})) + constant_parts_0 := mulmod(constant_parts_0, num_polys, R_MOD) + // h_s^{(2*num_polys)-1}-(h_s^{num_polys-1} * h^{num_polys}) + constant_parts_1 := addmod(constant_parts_1, h_pows_1, R_MOD) + // num_polys * (h_s^{(2*num_polys)-1}-(h_s^{num_polys-1} * h^{num_polys})) + constant_parts_1 := mulmod(constant_parts_1, num_polys, R_MOD) + + + let current_omega := 1 + let interim_product := product + for {let i:=0} lt(i, num_polys) {i := add(i,1)}{ + t_0 := mulmod(current_omega, h, R_MOD) + t_0 := sub(R_MOD, t_0) + t_0 := addmod(t_0, y, R_MOD) + // (num_polys * (h^{(2*num_polys)-1}-(h^{num_polys-1} * h_s^{num_polys})) * (y-(h*w_i))) + t_0 := mulmod(t_0, constant_parts_0, R_MOD) + + mstore(add(MEM_LAGRANGE_BASIS_DENOMS, mul(add(start, i), 0x20)), t_0) + + interim_product := mulmod(interim_product, t_0, R_MOD) + + mstore(add(MEM_LAGRANGE_BASIS_DENOM_PRODUCTS, mul(add(start, i), 0x20)), interim_product) + // w_i * (y^{2*num_polys} + (h^{num_polys} * h_s^{num_polys}) - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys}))) + mstore(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(start, i), 0x20)), mulmod(t_1, current_omega, R_MOD)) + // w_i {i = i+1} + current_omega := mulmod(current_omega, omega, R_MOD) + + } + + current_omega := 1 + for {let i:=0} lt(i, num_polys) {i := add(i,1)}{ + + t_0 := mulmod(current_omega, h_shifted, R_MOD) + t_0 := sub(R_MOD, t_0) + t_0 := addmod(t_0, y, R_MOD) + // (num_polys * (h_s^{(2*num_polys)-1}-(h_s^{num_polys-1} * h^{num_polys})) * (y-(h_s*w_i))) + t_0 := mulmod(t_0, constant_parts_1, R_MOD) + + mstore(add(MEM_LAGRANGE_BASIS_DENOMS, mul(add(add(start, num_polys), i), 0x20)), t_0) + + interim_product := mulmod(interim_product, t_0, R_MOD) + + mstore(add(MEM_LAGRANGE_BASIS_DENOM_PRODUCTS, mul(add(add(start, num_polys), i), 0x20)), interim_product) + // w_i * (y^{2*num_polys} + (h^{num_polys} * h_s^{num_polys}) - (y^{num_polys} * (h^{num_polys} + h_s^{num_polys}))) + 
mstore(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(add(start, num_polys), i), 0x20)), mulmod(t_1, current_omega, R_MOD)) + // w_i {i = i+1} + current_omega := mulmod(current_omega, omega, R_MOD) + } + + final_product := interim_product + } + + /** + * @dev Computes lagrange basis evaluations using montgomery lagrange basis inverses sent with proof. + * @notice Check individual functions for more details + */ + function precompute_all_lagrange_basis_evaluations_from_inverses() { + let y := mload(PVS_Y) + // w8 = 0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80 + // w4 = 0x30644e72e131a029048b6e193fd841045cea24f6fd736bec231204708f703636 + // w3 = 0x0000000000000000b3c4d79d41a917585bfc41088d8daaa78b17ea66b99c90dd + let product_0_7 := precompute_partial_lagrange_basis_evaluations(0, 8, y, 0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80, mload(add(OPS_OPENING_POINTS, mul(0, 0x20))), 1) + let product_0_11 := precompute_partial_lagrange_basis_evaluations(8, 4, y, 0x30644e72e131a029048b6e193fd841045cea24f6fd736bec231204708f703636, mload(add(OPS_OPENING_POINTS, mul(1, 0x20))), product_0_7) + let product_0_17 := precompute_partial_lagrange_basis_evaluations_for_union_set(add(8, 4), 3, y, 0x0000000000000000b3c4d79d41a917585bfc41088d8daaa78b17ea66b99c90dd, mload(add(OPS_OPENING_POINTS, mul(2, 0x20))), mload(add(OPS_OPENING_POINTS, mul(3, 0x20))), product_0_11) + + let montgomery_inverse:= mload(MEM_PROOF_MONTGOMERY_LAGRANGE_BASIS_INVERSE) + + if iszero(eq(mulmod(product_0_17, montgomery_inverse, R_MOD),1)) { + revertWithMessage(30, "Precompute Eval. 
Error [PALBE]") + } + let temp := montgomery_inverse + let loop_length := sub(TOTAL_LAGRANGE_BASIS_INVERSES_LENGTH,1) + for {let i:=loop_length} gt(i, 0) {i := sub(i,1)}{ + mstore(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(i,0x20)), mulmod(mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(i,0x20))), mulmod(mload(add(MEM_LAGRANGE_BASIS_DENOM_PRODUCTS, mul(sub(i,1), 0x20))), temp, R_MOD), R_MOD)) + temp := mulmod(temp, mload(add(MEM_LAGRANGE_BASIS_DENOMS, mul(i, 0x20))), R_MOD) + } + mstore(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(0, 0x20)), mulmod(mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(0, 0x20))), temp, R_MOD)) + } + + /** + * @dev Computes opening points h0, h1, h2, h3 + */ + function compute_opening_points(){ + // h = r^{power/num_polys} + let pvs_r := mload(PVS_R) + let r_2 := mulmod(pvs_r, pvs_r, R_MOD) + let r_3 := mulmod(r_2, pvs_r, R_MOD) + let r_6 := mulmod(r_3, r_3, R_MOD) + let r_8 := mulmod(r_6, r_2, R_MOD) + // h0 = pvs_r^3 + mstore(add(OPS_OPENING_POINTS, mul(0, 0x20)), r_3) + // h1 = pvs_r^6 + mstore(add(OPS_OPENING_POINTS, mul(1, 0x20)), r_6) + // h2 = pvs_r^8 + mstore(add(OPS_OPENING_POINTS, mul(2, 0x20)), r_8) + + // h3 (only round 2 needs opening at shifted point) + mstore(add(OPS_OPENING_POINTS, mul(3, 0x20)), mulmod(r_8, 0x0925f0bd364638ec3084b45fc27895f8f3f6f079096600fe946c8e9db9a47124, R_MOD)) + } + + /** + * @dev Initializes opening state OPS_Y_POWS[i] = y^i + * @notice only 9 powers are computed since the rest stay unused. + */ + function initialize_opening_state() { + compute_opening_points() + let acc := 1 + for {let i:=0} lt(i, 9) {i := add(i,1)}{ + mstore(add(OPS_Y_POWS, mul(i, 0x20)), acc) + acc := mulmod(acc, mload(PVS_Y), R_MOD) + } + precompute_all_lagrange_basis_evaluations_from_inverses() + + } + + /** + * @dev Computes r polynomial evaluations utilizing horner method + * (r*w)^{i}:{1, w*r, (w*r)^2, .. 
, (w*r)^{k-1}} + * horner: c0 + c1*(rw) + c2*(rw)^2 + c3*(rw)^3 -> (c0 + (rw)*(c1 + (rw)*(c2 + c3*(rw)))) + */ + function evaluate_r_polys_at_point_unrolled(main_gate_quotient_at_z, copy_perm_first_quotient_at_z, copy_perm_second_quotient_at_z){ + let omega_h + let c + + // setup round + // r + + // w8^1 = 0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80 + // w8^2 = 0x30644e72e131a029048b6e193fd841045cea24f6fd736bec231204708f703636 + // w8^3 = 0x1d59376149b959ccbd157ac850893a6f07c2d99b3852513ab8d01be8e846a566 + // w8^4 = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000 + // w8^5 = 0x0530d09118705106cbb4a786ead16926d5d174e181a26686af5448492e42a181 + // w8^6 = 0x0000000000000000b3c4d79d41a91758cb49c3517c4604a520cff123608fc9cb + // w8^7 = 0x130b17119778465cfb3acaee30f81dee20710ead41671f568b11d9ab07b95a9b + omega_h := mload(add(OPS_OPENING_POINTS, mul(0, 0x20))) + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(0 ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80, mload(add(OPS_OPENING_POINTS, mul(0, 0x20))) , R_MOD) + + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(1 ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x30644e72e131a029048b6e193fd841045cea24f6fd736bec231204708f703636, mload(add(OPS_OPENING_POINTS, 
mul(0, 0x20))) , R_MOD) + + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(2 ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x1d59376149b959ccbd157ac850893a6f07c2d99b3852513ab8d01be8e846a566, mload(add(OPS_OPENING_POINTS, mul(0, 0x20))) , R_MOD) + + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(3 ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000, mload(add(OPS_OPENING_POINTS, mul(0, 0x20))) , R_MOD) + + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(4 ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x0530d09118705106cbb4a786ead16926d5d174e181a26686af5448492e42a181, mload(add(OPS_OPENING_POINTS, mul(0, 0x20))) , R_MOD) + + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + 
mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(5 ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x0000000000000000b3c4d79d41a91758cb49c3517c4604a520cff123608fc9cb, mload(add(OPS_OPENING_POINTS, mul(0, 0x20))) , R_MOD) + + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(6 ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x130b17119778465cfb3acaee30f81dee20710ead41671f568b11d9ab07b95a9b, mload(add(OPS_OPENING_POINTS, mul(0, 0x20))) , R_MOD) + + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(7, 0x20))), omega_h, R_MOD) + for {let i:=1} lt(i,7) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(7,i), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(MEM_PROOF_EVALUATIONS), R_MOD) + mstore(PS_R_EVALS, addmod(mload(PS_R_EVALS), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(7 ,0x20))), R_MOD), R_MOD)) + + + // first round + // r + + // w4^1 = 0x30644e72e131a029048b6e193fd841045cea24f6fd736bec231204708f703636 + // w4^2 = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000 + // w4^3 = 0x0000000000000000b3c4d79d41a91758cb49c3517c4604a520cff123608fc9cb + omega_h := mload(add(OPS_OPENING_POINTS, mul(1, 0x20))) + + c := mulmod(main_gate_quotient_at_z, omega_h, R_MOD) + for {let i:=1} lt(i,3) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),i),1), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),3),1), 0x20))), R_MOD) + + mstore(add(PS_R_EVALS, mul(1,0x20)), addmod(mload(add(PS_R_EVALS, mul(1,0x20))), mulmod(c, 
mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(8,0) ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x30644e72e131a029048b6e193fd841045cea24f6fd736bec231204708f703636, mload(add(OPS_OPENING_POINTS, mul(1, 0x20))) , R_MOD) + + c := mulmod(main_gate_quotient_at_z, omega_h, R_MOD) + for {let i:=1} lt(i,3) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),i),1), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),3),1), 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(1,0x20)), addmod(mload(add(PS_R_EVALS, mul(1,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(8,1) ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000, mload(add(OPS_OPENING_POINTS, mul(1, 0x20))) , R_MOD) + + c := mulmod(main_gate_quotient_at_z, omega_h, R_MOD) + for {let i:=1} lt(i,3) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),i),1), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),3),1), 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(1,0x20)), addmod(mload(add(PS_R_EVALS, mul(1,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(8,2) ,0x20))), R_MOD), R_MOD)) + omega_h := mulmod(0x0000000000000000b3c4d79d41a91758cb49c3517c4604a520cff123608fc9cb, mload(add(OPS_OPENING_POINTS, mul(1, 0x20))) , R_MOD) + + c := mulmod(main_gate_quotient_at_z, omega_h, R_MOD) + for {let i:=1} lt(i,3) {i := add(i,1)} { + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),i),1), 0x20))), R_MOD), omega_h, R_MOD) + } + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(sub(sub(add(8,4),3),1), 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(1,0x20)), addmod(mload(add(PS_R_EVALS, mul(1,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(8,3) ,0x20))), R_MOD), R_MOD)) + + // second round + 
// c2 + // r + omega_h := mload(add(OPS_OPENING_POINTS, mul(2, 0x20))) + let omega_h_shifted := mload(add(OPS_OPENING_POINTS, mul(3, 0x20))) + c := mulmod(copy_perm_second_quotient_at_z, omega_h, R_MOD) + c := mulmod(addmod(c, copy_perm_first_quotient_at_z, R_MOD), omega_h, R_MOD) + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(11, 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(2,0x20)), addmod(mload(add(PS_R_EVALS, mul(2,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(add(8,4),0) ,0x20))), R_MOD), R_MOD)) + // c2 shifted + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(add(12,2), 0x20))), omega_h_shifted, R_MOD) + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(add(12,1), 0x20))), R_MOD), omega_h_shifted, R_MOD) + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(12, 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(2,0x20)), addmod(mload(add(PS_R_EVALS, mul(2,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(add(add(8,4),3),0) ,0x20))), R_MOD), R_MOD)) + // c2 + omega_h := mulmod(0x0000000000000000b3c4d79d41a917585bfc41088d8daaa78b17ea66b99c90dd, mload(add(OPS_OPENING_POINTS, mul(2, 0x20))) , R_MOD) + omega_h_shifted := mulmod(0x0000000000000000b3c4d79d41a917585bfc41088d8daaa78b17ea66b99c90dd, mload(add(OPS_OPENING_POINTS, mul(3, 0x20))) , R_MOD) + + c := mulmod(copy_perm_second_quotient_at_z, omega_h, R_MOD) + c := mulmod(addmod(c, copy_perm_first_quotient_at_z, R_MOD), omega_h, R_MOD) + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(11, 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(2,0x20)), addmod(mload(add(PS_R_EVALS, mul(2,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(add(8,4),1) ,0x20))), R_MOD), R_MOD)) + // c2 shifted + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(add(12,2), 0x20))), omega_h_shifted, R_MOD) + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(add(12,1), 0x20))), R_MOD), omega_h_shifted, R_MOD) + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, 
mul(12, 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(2,0x20)), addmod(mload(add(PS_R_EVALS, mul(2,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(add(add(8,4),3),1) ,0x20))), R_MOD), R_MOD)) + // c2 + omega_h := mulmod(0x30644e72e131a029048b6e193fd84104cc37a73fec2bc5e9b8ca0b2d36636f23, mload(add(OPS_OPENING_POINTS, mul(2, 0x20))) , R_MOD) + omega_h_shifted := mulmod(0x30644e72e131a029048b6e193fd84104cc37a73fec2bc5e9b8ca0b2d36636f23, mload(add(OPS_OPENING_POINTS, mul(3, 0x20))) , R_MOD) + + c := mulmod(copy_perm_second_quotient_at_z, omega_h, R_MOD) + c := mulmod(addmod(c, copy_perm_first_quotient_at_z, R_MOD), omega_h, R_MOD) + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(11, 0x20))), R_MOD) + mstore(add(PS_R_EVALS, mul(2,0x20)), addmod(mload(add(PS_R_EVALS, mul(2,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(add(8,4),2) ,0x20))), R_MOD), R_MOD)) + // c2 shifted + c := mulmod(mload(add(MEM_PROOF_EVALUATIONS, mul(add(12,2), 0x20))), omega_h_shifted, R_MOD) + c := mulmod(addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(add(12,1), 0x20))), R_MOD), omega_h_shifted, R_MOD) + c := addmod(c, mload(add(MEM_PROOF_EVALUATIONS, mul(12, 0x20))), R_MOD) + + mstore(add(PS_R_EVALS, mul(2,0x20)), addmod(mload(add(PS_R_EVALS, mul(2,0x20))), mulmod(c, mload(add(MEM_PROOF_LAGRANGE_BASIS_EVALS, mul(add(add(add(8,4),3),2) ,0x20))), R_MOD), R_MOD)) + } + + /** + * @dev Computes the openings and returns the result of pairing computation + */ + function check_openings() -> out { + // f(X) = (Z_{T\S0}(y) * (C0(X) - r0(y))) + (alpha*(Z_{T\S1}(y)*(C1(X) - r1(y)))) + (alpha^{2}*(Z_{T\S2}(y)*(C2(X) - r2(y)))) + // Note that, in our case set differences(Z_T\{S_i}) are: + // - Z_{T\S0}(y): (y^{k1}-ζ)*(y^{k2}-ζ)*(y^{k2}-(ζ*w)) + // - Z_{T\S1}(y): (y^{k0}-ζ)*(y^{k2}-ζ)*(y^{k2}-(ζ*w)) + // - Z_{T\S2}(y): (y^{k0}-ζ)*(y^{k1}-ζ) where + // k0=8, k1=4, and k2=3 are number of the polynomials for setup, first and second round respectively + + let tmp + 
evaluate_r_polys_at_point_unrolled(mload(MAIN_GATE_QUOTIENT_AT_Z), mload(COPY_PERM_FIRST_QUOTIENT_AT_Z), mload(COPY_PERM_SECOND_QUOTIENT_AT_Z)) + + // -ζ + mstore(add(PS_MINUS_Z, mul(0, 0x20)), sub(R_MOD, mload(PVS_Z))) + // -(ζ*w) + mstore(add(PS_MINUS_Z, mul(1, 0x20)), sub(R_MOD, mload(PVS_Z_OMEGA))) + + // Z_{T\S0}(y) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(0, 0x20)), addmod(mload(add(OPS_Y_POWS, mul(3, 0x20))), mload(add(PS_MINUS_Z, mul(1, 0x20))), R_MOD)) + tmp := addmod(mload(add(OPS_Y_POWS, mul(3, 0x20))), mload(add(PS_MINUS_Z, mul(0, 0x20))),R_MOD) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(0, 0x20)), mulmod(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(0, 0x20))), tmp, R_MOD)) + tmp := addmod(mload(add(OPS_Y_POWS, mul(4, 0x20))), mload(add(PS_MINUS_Z, mul(0, 0x20))),R_MOD) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(0, 0x20)), mulmod(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(0, 0x20))), tmp, R_MOD)) + mstore(PS_VANISHING_AT_Y, mload(add(PS_SET_DIFFERENCES_AT_Y, mul(0, 0x20)))) + mstore(PS_INV_ZTS0_AT_Y, modexp(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(0, 0x20))), sub(R_MOD,2))) + + // Z_{T\S1}(y) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(1, 0x20)), addmod(mload(add(OPS_Y_POWS, mul(3, 0x20))), mload(add(PS_MINUS_Z, mul(1, 0x20))), R_MOD)) + tmp := addmod(mload(add(OPS_Y_POWS, mul(3, 0x20))), mload(add(PS_MINUS_Z, mul(0, 0x20))),R_MOD) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(1, 0x20)), mulmod(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(1, 0x20))), tmp, R_MOD)) + tmp := addmod(mload(add(OPS_Y_POWS, mul(8, 0x20))), mload(add(PS_MINUS_Z, mul(0, 0x20))),R_MOD) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(1, 0x20)), mulmod(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(1, 0x20))), tmp, R_MOD)) + mstore(PS_VANISHING_AT_Y, mulmod(mload(PS_VANISHING_AT_Y), tmp, R_MOD)) + + // // Z_{T\S2}(y) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(2, 0x20)), addmod(mload(add(OPS_Y_POWS, mul(4, 0x20))), mload(add(PS_MINUS_Z, mul(0, 0x20))), R_MOD)) + tmp := addmod(mload(add(OPS_Y_POWS, mul(8, 0x20))), 
mload(add(PS_MINUS_Z, mul(0, 0x20))),R_MOD) + mstore(add(PS_SET_DIFFERENCES_AT_Y, mul(2, 0x20)), mulmod(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(2, 0x20))), tmp, R_MOD)) + + // W(X) = f(X) / Z_T(y) where Z_T(y) = (y^{k0}-ζ)*(y^{k1}-ζ)*(y^{k2}-ζ)*(y^{k2}-(ζ*w)) + // we need to check that + // f(X) - W(X) * Z_T(y) = 0 + + // W'(X) = L(X) / (Z_{T\S0}(y)*(X-y)) + // L(X)/Z_{T\S0}(y) = (C0(X) - r0(y)) + (alpha*(Z_{T\S1}(y)/Z_{T\S0}(y))*(C1(X) - r1(y))) + (alpha^{2}*(Z_{T\S2}(y)/Z_{T\S0}(y))*(C2(X) - r2(y))) - ((Z_T(y)/Z_{T\S0}(y))*W(X)) + + // the identity check is reduced into following + // L(X) - W'(X)*Z_{T\S0}(y)(X-y) == 0 + // verifier has commitments to the C_i(X) polynomials + // verifer also recomputed r_i(y) + // group constant and commitment parts + // first prepare L(X)/Z_{T\S0}(y) + // C(X) = C0(X) + ((alpha*Z_{T\S1}(y)/Z_{T\S0}(y))*C1(X)) + ((alpha^2*Z_{T\S2}(y)/Z_{T\S0}(y))*C2(X)) + // r(y) = r0(y) + ((alpha*Z_{T\S1}(y)/Z_{T\S0}(y))*r1(y)) + ((alpha^2*Z_{T\S2}(y)/Z_{T\S0}(y))*r2(y)) + // now construct + // L(X)/Z_{T\S0}(y) = C(X) - r(y) - ((Z_T(y)/Z_{T\S0}(y))*W(X)) + // now check following identity + // C(X) - r(y) - ((Z_t(y)/Z_{T\S0}(y))*W(X)) - (W'(X)*(X-y)) = 0 + // [C(X)] - [r(y)*G1] - (Z_T(y)/Z_{T\S0}(y))*[W] - [(X-y)*W'] = 0 + // [C(X)] - [r(y)*G1] - (Z_T(y)/Z_{T\S0}(y))*[W] - [X*W'] + [y*W]' = 0 + // [C(X)] - [r(y)*G1] - (Z_T(y)/Z_{T\S0}(y))*[W] + [y*W'] - [X*W'] = 0 + // points with X will be multiplied in the exponent via pairing + // so final pairing would ne + // e([C(X)] - [r(y)*G1] - [Z_T(y)/(Z_{T\S0}(y)*W)] + [y*W'], G2)*e(-W', X*G2) = 1 + + // C0 + let ps_aggregated_commitment_g1_x := VK_C0_G1_X + let ps_aggregated_commitment_g1_y := VK_C0_G1_Y + + // ((alpha^{2}*Z_{T\S2}(y))/Z_{T\S0}(y)) + let aggregated_r_at_y := mulmod(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(2, 0x20))), mload(PS_INV_ZTS0_AT_Y), R_MOD) + aggregated_r_at_y := mulmod(aggregated_r_at_y, mload(PVS_ALPHA_1), R_MOD) + + // ((alpha^{2}*Z_{T\S2}(y))/Z_{T\S0}(y))*C2 + let tp_g1_x, 
tp_g1_y := point_mul(mload(MEM_PROOF_COMMITMENT_1_G1_X), mload(MEM_PROOF_COMMITMENT_1_G1_Y), aggregated_r_at_y) + // c0 + (((alpha^{2}*Z_{T\S2}(y))/Z_{T\S0}(y))*C2) + ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y := point_add(ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y, tp_g1_x, tp_g1_y) + // ((alpha^{2}*Z_{T\S2}(y))/Z_{T\S0}(y))*r2 + aggregated_r_at_y := mulmod(aggregated_r_at_y, mload(add(PS_R_EVALS, mul(2,0x20))), R_MOD) + + // (alpha*Z_{T\S1}(y)/Z_{T\S0}(y)) + tmp := mulmod(mload(add(PS_SET_DIFFERENCES_AT_Y, mul(1, 0x20))), mload(PS_INV_ZTS0_AT_Y), R_MOD) + tmp := mulmod(tmp, mload(PVS_ALPHA_0), R_MOD) + + // (alpha*Z_{T\S1}(y)/Z_{T\S0}(y))*C1 + tp_g1_x, tp_g1_y := point_mul(mload(MEM_PROOF_COMMITMENT_0_G1_X), mload(MEM_PROOF_COMMITMENT_0_G1_Y), tmp) + // c0 + ((alpha*Z_{T\S1}(y)/Z_{T\S0}(y))*C1) + (((alpha^{2}*Z_{T\S2}(y))/Z_{T\S0}(y))*C2) + ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y := point_add(ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y, tp_g1_x, tp_g1_y) + // (alpha*Z_{T\S1}(y)/Z_{T\S0}(y))*r1 + tmp := mulmod(tmp, mload(add(PS_R_EVALS, mul(1,0x20))), R_MOD) + // ((alpha*Z_{T\S1}(y)/Z_{T\S0}(y))*r1) + ((alpha^{2}*Z_{T\S2}(y)/Z_{T\S0}(y))*r2) + aggregated_r_at_y := addmod(aggregated_r_at_y, tmp, R_MOD) + // r0 + (alpha*Z_{T\S1}(y)/Z_{T\S0}(y))*r1 + ((alpha^{2}*Z_{T\S2}(y)/Z_{T\S0}(y))*r2) + aggregated_r_at_y := addmod(aggregated_r_at_y, mload(PS_R_EVALS), R_MOD) + tp_g1_x, tp_g1_y := point_mul(1, 2, aggregated_r_at_y) + ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y := point_sub(ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y, tp_g1_x, tp_g1_y) + // - ((Z_T(y)/Z_{T\S0}(y))*W(X)) + mstore(PS_VANISHING_AT_Y, mulmod(mload(PS_VANISHING_AT_Y), mload(PS_INV_ZTS0_AT_Y), R_MOD)) + tp_g1_x, tp_g1_y := point_mul(mload(MEM_PROOF_COMMITMENT_2_G1_X), mload(MEM_PROOF_COMMITMENT_2_G1_Y), mload(PS_VANISHING_AT_Y)) + ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y := 
point_sub(ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y, tp_g1_x, tp_g1_y) + // L(X)/Z_{T\S0}(y) is aggregated + + // Now check W'(X) = L(X) / (Z_{T\S0}(y)*(x-y)) + // L(X)/Z_{T\S0}(y) + (y*W'(X)) - (x*W'(X)) = 0 + tp_g1_x, tp_g1_y := point_mul(mload(MEM_PROOF_COMMITMENT_3_G1_X), mload(MEM_PROOF_COMMITMENT_3_G1_Y), mload(PVS_Y)) + ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y := point_add(ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y, tp_g1_x, tp_g1_y) + let is_zero_commitment + if iszero(mload(MEM_PROOF_COMMITMENT_3_G1_Y)) { + if gt(mload(MEM_PROOF_COMMITMENT_3_G1_X), 0) { + revertWithMessage(21, "non zero x value [CO]") + } + is_zero_commitment := 1 + } + + out := pairing_check(ps_aggregated_commitment_g1_x, ps_aggregated_commitment_g1_y, is_zero_commitment) + + } + + /** + * @dev Generates the rolling hash using `val` and updates the transcript. + * The computation is done as follows: + * new_state_0 = keccak256(uint32(0) || old_state_0 || old_state_1 || value) + * new_state_1 = keccak256(uint32(1) || old_state_0 || old_state_1 || value) + * + * @notice The computation assumes that the memory slots 0x200 - 0x202 are clean and doesn't explicitly clean them + */ + function update_transcript(value) { + mstore8(TRANSCRIPT_DST_BYTE_SLOT, 0x00) + mstore(TRANSCRIPT_CHALLENGE_SLOT, value) + let newState0 := keccak256(TRANSCRIPT_BEGIN_SLOT, 0x64) + mstore8(TRANSCRIPT_DST_BYTE_SLOT, 0x01) + let newState1 := keccak256(TRANSCRIPT_BEGIN_SLOT, 0x64) + mstore(TRANSCRIPT_STATE_1_SLOT, newState1) + mstore(TRANSCRIPT_STATE_0_SLOT, newState0) + } + + /** + * @dev Generates a new challenge with (uint32(2) || state_0 || state_1 || uint32(challenge_counter)) + * The challenge_counter is incremented after every challenge + */ + function get_challenge(challenge_counter) -> challenge { + mstore8(TRANSCRIPT_DST_BYTE_SLOT, 0x02) + mstore(TRANSCRIPT_CHALLENGE_SLOT, shl(224, challenge_counter)) + challenge := and(keccak256(TRANSCRIPT_BEGIN_SLOT, 
0x48), FR_MASK) + } + + /** + * @dev Performs scalar multiplication: point * scalar -> t + * @notice Stores values starting from the initial free memory pointer i.e., 0x80. + * The free memory pointer is not updated as it stays unused throughout the code execution. + */ + function point_mul(p_x, p_y, s)-> t_x, t_y { + mstore(0x80, p_x) + mstore(0xa0, p_y) + mstore(0xc0, s) + + let success := staticcall(gas(), 7, 0x80, 0x60, 0x80, 0x40) + if iszero(success) { + revertWithMessage(27, "point multiplication failed") + } + t_x := mload(0x80) + t_y := mload(add(0x80, 0x20)) + } + + /** + * @dev Performs point addition: point 1 + point 2 -> t + * @notice Stores values starting from the initial free memory pointer i.e., 0x80. + * The free memory pointer is not updated as it stays unused throughout the code execution. + */ + function point_add(p1_x, p1_y, p2_x, p2_y) -> t_x, t_y { + mstore(0x80, p1_x) + mstore(0xa0, p1_y) + mstore(0xc0, p2_x) + mstore(0xe0, p2_y) + + let success := staticcall(gas(), 6, 0x80, 0x80, 0x80, 0x40) + if iszero(success) { + revertWithMessage(21, "point addition failed") + } + + t_x := mload(0x80) + t_y := mload(add(0x80, 0x20)) + } + + /** + * @dev Performs point subtraction: point 1 + point 2 -> t + * @notice Stores values starting from the initial free memory pointer i.e., 0x80. + * The free memory pointer is not updated as it stays unused throughout the code execution. + * @notice We don't consider the highly unlikely case where p2 can be a point-at-infinity and the function would revert. 
+ */ + function point_sub(p1_x, p1_y, p2_x, p2_y) -> t_x, t_y { + mstore(0x80, p1_x) + mstore(0xa0, p1_y) + mstore(0xc0, p2_x) + mstore(0xe0, sub(Q_MOD, p2_y)) + + let success := staticcall(gas(), 6, 0x80, 0x80, 0x80, 0x40) + if iszero(success) { + revertWithMessage(24, "point subtraction failed") + } + + t_x := mload(0x80) + t_y := mload(add(0x80, 0x20)) + } + + /** + * @dev Calculates EC Pairing result following the EIP-197: https://eips.ethereum.org/EIPS/eip-197 + * Performs point negation before pairing calculation, if the flag `is_zero_commitment` is true + * + * @notice Stores values starting from the initial free memory pointer i.e., 0x80. + * The free memory pointer is not updated as it stays unused throughout the code execution. + * While code reformatting consider not to overwrite the first constant-defined memory location, which is currently + * TRANSCRIPT_BEGIN_SLOT = 0x200 + */ + function pairing_check(p1_x, p1_y, is_zero_commitment) -> res { + mstore(0x80, p1_x) + mstore(0xa0, p1_y) + mstore(0xc0, VK_G2_ELEMENT_0_X1) + mstore(0xe0, VK_G2_ELEMENT_0_X2) + mstore(0x100, VK_G2_ELEMENT_0_Y1) + mstore(0x120, VK_G2_ELEMENT_0_Y2) + mstore(0x140, mload(MEM_PROOF_COMMITMENT_3_G1_X)) + mstore(0x160, mload(MEM_PROOF_COMMITMENT_3_G1_Y)) + if iszero(is_zero_commitment){ + mstore(0x160, sub(Q_MOD, mload(MEM_PROOF_COMMITMENT_3_G1_Y))) + } + mstore(0x180, VK_G2_ELEMENT_1_X1) + mstore(0x1a0, VK_G2_ELEMENT_1_X2) + mstore(0x1c0, VK_G2_ELEMENT_1_Y1) + mstore(0x1e0, VK_G2_ELEMENT_1_Y2) + + let success := staticcall(gas(), 8, 0x80, mul(12, 0x20), 0x80, 0x20) + + if iszero(success) { + revertWithMessage(20, "pairing check failed") + } + res := mload(0x80) + } + + /** + * @dev Reverts with the desired custom error string. + * @notice Stores values starting from the initial free memory pointer i.e., 0x80. + * The free memory pointer is not updated as it stays unused throughout the code execution. 
+ */ + function revertWithMessage(len, reason) { + // "Error(string)" signature: bytes32(bytes4(keccak256("Error(string)"))) + mstore(0x80, 0x08c379a000000000000000000000000000000000000000000000000000000000) + // Data offset + mstore(0x84, 0x0000000000000000000000000000000000000000000000000000000000000020) + // Length of revert string + mstore(0xa4, len) + // Revert reason + mstore(0xc4, reason) + // Revert + revert(0x80, 0x64) + } + + /** + * @dev Performs modular exponentiation using the formula (value ^ power) mod R_MOD. + * @notice Stores values starting from the initial free memory pointer i.e., 0x80. + * The free memory pointer is not updated as it stays unused throughout the code execution. + */ + function modexp(value, power) -> res { + mstore(0x80, 0x20) + mstore(0xa0, 0x20) + mstore(0xc0, 0x20) + mstore(0xe0, value) + mstore(0x100, power) + mstore(0x120, R_MOD) + if iszero(staticcall(gas(), 5, 0x80, 0xc0, 0x80, 0x20)) { + revertWithMessage(24, "modexp precompile failed") + } + res := mload(0x80) + } + } + + } +} \ No newline at end of file diff --git a/l1-contracts/contracts/state-transition/chain-deps/DiamondInit.sol b/l1-contracts/contracts/state-transition/chain-deps/DiamondInit.sol index 424223396..32d716a9a 100644 --- a/l1-contracts/contracts/state-transition/chain-deps/DiamondInit.sol +++ b/l1-contracts/contracts/state-transition/chain-deps/DiamondInit.sol @@ -3,39 +3,68 @@ pragma solidity 0.8.24; import {Diamond} from "../libraries/Diamond.sol"; -import {ZkSyncHyperchainBase} from "./facets/ZkSyncHyperchainBase.sol"; +import {ZKChainBase} from "./facets/ZKChainBase.sol"; import {L2_TO_L1_LOG_SERIALIZE_SIZE, MAX_GAS_PER_TRANSACTION} from "../../common/Config.sol"; import {InitializeData, IDiamondInit} from "../chain-interfaces/IDiamondInit.sol"; +import {PriorityQueue} from "../libraries/PriorityQueue.sol"; +import {PriorityTree} from "../libraries/PriorityTree.sol"; +import {ZeroAddress, TooMuchGas} from "../../common/L1ContractErrors.sol"; /// 
@author Matter Labs /// @dev The contract is used only once to initialize the diamond proxy. /// @dev The deployment process takes care of this contract's initialization. -contract DiamondInit is ZkSyncHyperchainBase, IDiamondInit { +contract DiamondInit is ZKChainBase, IDiamondInit { + using PriorityQueue for PriorityQueue.Queue; + using PriorityTree for PriorityTree.Tree; + /// @dev Initialize the implementation to prevent any possibility of a Parity hack. constructor() reentrancyGuardInitializer {} - /// @notice hyperchain diamond contract initialization + /// @notice ZK chain diamond contract initialization /// @return Magic 32 bytes, which indicates that the contract logic is expected to be used as a diamond proxy /// initializer function initialize(InitializeData calldata _initializeData) external reentrancyGuardInitializer returns (bytes32) { - require(address(_initializeData.verifier) != address(0), "vt"); - require(_initializeData.admin != address(0), "vy"); - require(_initializeData.validatorTimelock != address(0), "hc"); - require(_initializeData.priorityTxMaxGasLimit <= MAX_GAS_PER_TRANSACTION, "vu"); - require(_initializeData.bridgehub != address(0), "DiamondInit: b0"); - require(_initializeData.stateTransitionManager != address(0), "DiamondInit: stm0"); - require(_initializeData.baseToken != address(0), "DiamondInit: bt0"); - require(_initializeData.baseTokenBridge != address(0), "DiamondInit: btb0"); - require(_initializeData.blobVersionedHashRetriever != address(0), "DiamondInit: bvhr0"); + if (address(_initializeData.dualVerifier) == address(0)) { + revert ZeroAddress(); + } + if (_initializeData.plonkVerifier == address(0)) { + revert ZeroAddress(); + } + if (_initializeData.fflonkVerifier == address(0)) { + revert ZeroAddress(); + } + if (_initializeData.admin == address(0)) { + revert ZeroAddress(); + } + if (_initializeData.validatorTimelock == address(0)) { + revert ZeroAddress(); + } + if (_initializeData.priorityTxMaxGasLimit > 
MAX_GAS_PER_TRANSACTION) { + revert TooMuchGas(); + } + if (_initializeData.bridgehub == address(0)) { + revert ZeroAddress(); + } + if (_initializeData.chainTypeManager == address(0)) { + revert ZeroAddress(); + } + if (_initializeData.baseTokenAssetId == bytes32(0)) { + revert ZeroAddress(); + } + if (_initializeData.blobVersionedHashRetriever == address(0)) { + revert ZeroAddress(); + } s.chainId = _initializeData.chainId; s.bridgehub = _initializeData.bridgehub; - s.stateTransitionManager = _initializeData.stateTransitionManager; - s.baseToken = _initializeData.baseToken; - s.baseTokenBridge = _initializeData.baseTokenBridge; + s.chainTypeManager = _initializeData.chainTypeManager; + s.baseTokenAssetId = _initializeData.baseTokenAssetId; s.protocolVersion = _initializeData.protocolVersion; - s.verifier = _initializeData.verifier; + s.dualVerifier = _initializeData.dualVerifier; + s.plonkVerifier = _initializeData.plonkVerifier; + s.fflonkVerifier = _initializeData.fflonkVerifier; + s.fflonkProofLength = _initializeData.fflonkProofLength; s.admin = _initializeData.admin; s.validators[_initializeData.validatorTimelock] = true; @@ -46,6 +75,7 @@ contract DiamondInit is ZkSyncHyperchainBase, IDiamondInit { s.priorityTxMaxGasLimit = _initializeData.priorityTxMaxGasLimit; s.feeParams = _initializeData.feeParams; s.blobVersionedHashRetriever = _initializeData.blobVersionedHashRetriever; + s.priorityTree.setup(s.priorityQueue.getTotalPriorityTxs()); // While this does not provide a protection in the production, it is needed for local testing // Length of the L2Log encoding should not be equal to the length of other L2Logs' tree nodes preimages diff --git a/l1-contracts/contracts/state-transition/chain-deps/DiamondProxy.sol b/l1-contracts/contracts/state-transition/chain-deps/DiamondProxy.sol index 5cf26ac82..db29da126 100644 --- a/l1-contracts/contracts/state-transition/chain-deps/DiamondProxy.sol +++ 
b/l1-contracts/contracts/state-transition/chain-deps/DiamondProxy.sol @@ -2,6 +2,8 @@ pragma solidity 0.8.24; +// solhint-disable gas-custom-errors + import {Diamond} from "../libraries/Diamond.sol"; /// @title Diamond Proxy Contract (EIP-2535) diff --git a/l1-contracts/contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol b/l1-contracts/contracts/state-transition/chain-deps/ZKChainStorage.sol similarity index 81% rename from l1-contracts/contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol rename to l1-contracts/contracts/state-transition/chain-deps/ZKChainStorage.sol index e76d86f24..2e719c349 100644 --- a/l1-contracts/contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol +++ b/l1-contracts/contracts/state-transition/chain-deps/ZKChainStorage.sol @@ -3,7 +3,9 @@ pragma solidity 0.8.24; import {IVerifier, VerifierParams} from "../chain-interfaces/IVerifier.sol"; +// import {IChainTypeManager} from "../IChainTypeManager.sol"; import {PriorityQueue} from "../../state-transition/libraries/PriorityQueue.sol"; +import {PriorityTree} from "../../state-transition/libraries/PriorityTree.sol"; /// @notice Indicates whether an upgrade is initiated and if yes what type /// @param None Upgrade is NOT initiated @@ -58,22 +60,23 @@ struct FeeParams { uint64 minimalL2GasPrice; } -/// @dev storing all storage variables for hyperchain diamond facets +/// @dev storing all storage variables for ZK chain diamond facets /// NOTE: It is used in a proxy, so it is possible to add new variables to the end /// but NOT to modify already existing variables or change their order. /// NOTE: variables prefixed with '__DEPRECATED_' are deprecated and shouldn't be used. /// Their presence is maintained for compatibility and to prevent storage collision. 
-struct ZkSyncHyperchainStorage { +// solhint-disable-next-line gas-struct-packing +struct ZKChainStorage { /// @dev Storage of variables needed for deprecated diamond cut facet uint256[7] __DEPRECATED_diamondCutStorage; - /// @notice Address which will exercise critical changes to the Diamond Proxy (upgrades, freezing & unfreezing). Replaced by STM + /// @notice Address which will exercise critical changes to the Diamond Proxy (upgrades, freezing & unfreezing). Replaced by CTM address __DEPRECATED_governor; /// @notice Address that the governor proposed as one that will replace it address __DEPRECATED_pendingGovernor; /// @notice List of permitted validators mapping(address validatorAddress => bool isValidator) validators; - /// @dev Verifier contract. Used to verify aggregated proof for batches - IVerifier verifier; + /// @dev Dual Verifier contract. Wrapper contract that routes proof verification based on the proof type + IVerifier dualVerifier; /// @notice Total number of executed batches i.e. batches[totalBatchesExecuted] points at the latest executed batch /// (batch 0 is genesis) uint256 totalBatchesExecuted; @@ -131,23 +134,45 @@ struct ZkSyncHyperchainStorage { address pendingAdmin; /// @dev Fee params used to derive gasPrice for the L1->L2 transactions. For L2 transactions, /// the bootloader gives enough freedom to the operator. + /// @dev The value is only for the L1 deployment of the ZK Chain, since payment for all the priority transactions is + /// charged at that level. FeeParams feeParams; /// @dev Address of the blob versioned hash getter smart contract used for EIP-4844 versioned hashes. + /// @dev Used only for testing. 
address blobVersionedHashRetriever; /// @dev The chainId of the chain uint256 chainId; /// @dev The address of the bridgehub address bridgehub; - /// @dev The address of the StateTransitionManager - address stateTransitionManager; + /// @dev The address of the ChainTypeManager + address chainTypeManager; /// @dev The address of the baseToken contract. Eth is address(1) - address baseToken; + address __DEPRECATED_baseToken; /// @dev The address of the baseTokenbridge. Eth also uses the shared bridge - address baseTokenBridge; + address __DEPRECATED_baseTokenBridge; /// @notice gasPriceMultiplier for each baseToken, so that each L1->L2 transaction pays for its transaction on the destination /// we multiply by the nominator, and divide by the denominator uint128 baseTokenGasPriceMultiplierNominator; uint128 baseTokenGasPriceMultiplierDenominator; /// @dev The optional address of the contract that has to be used for transaction filtering/whitelisting address transactionFilterer; + /// @dev The address of the l1DAValidator contract. + /// This contract is responsible for the verification of the correctness of the DA on L1. + address l1DAValidator; + /// @dev The address of the contract on L2 that is responsible for the data availability verification. + /// This contract sends `l2DAValidatorOutputHash` to L1 via L2->L1 system log and it will routed to the `l1DAValidator` contract. + address l2DAValidator; + /// @dev the Asset Id of the baseToken + bytes32 baseTokenAssetId; + /// @dev If this ZKchain settles on this chain, then this is zero. Otherwise it is the address of the ZKchain that is a + /// settlement layer for this ZKchain. (think about it as a 'forwarding' address for the chain that migrated away). 
+ address settlementLayer; + /// @dev Priority tree, the new data structure for priority queue + PriorityTree.Tree priorityTree; + /// @dev The address of the PLONK Verifier contract + address plonkVerifier; + /// @dev The address of the FFLONK Verifier contract + address fflonkVerifier; + /// @dev The length of the FFLONK proof type + uint256 fflonkProofLength; } diff --git a/l1-contracts/contracts/state-transition/chain-deps/facets/Admin.sol b/l1-contracts/contracts/state-transition/chain-deps/facets/Admin.sol index 7eb6e7904..27bbe3155 100644 --- a/l1-contracts/contracts/state-transition/chain-deps/facets/Admin.sol +++ b/l1-contracts/contracts/state-transition/chain-deps/facets/Admin.sol @@ -2,23 +2,45 @@ pragma solidity 0.8.24; +// solhint-disable gas-custom-errors, reason-string + import {IAdmin} from "../../chain-interfaces/IAdmin.sol"; import {Diamond} from "../../libraries/Diamond.sol"; -import {MAX_GAS_PER_TRANSACTION} from "../../../common/Config.sol"; -import {FeeParams, PubdataPricingMode} from "../ZkSyncHyperchainStorage.sol"; -import {ZkSyncHyperchainBase} from "./ZkSyncHyperchainBase.sol"; -import {IStateTransitionManager} from "../../IStateTransitionManager.sol"; +import {MAX_GAS_PER_TRANSACTION, ZKChainCommitment} from "../../../common/Config.sol"; +import {FeeParams, PubdataPricingMode} from "../ZKChainStorage.sol"; +import {PriorityTree} from "../../../state-transition/libraries/PriorityTree.sol"; +import {PriorityQueue} from "../../../state-transition/libraries/PriorityQueue.sol"; +import {ZKChainBase} from "./ZKChainBase.sol"; +import {IChainTypeManager} from "../../IChainTypeManager.sol"; +import {IL1GenesisUpgrade} from "../../../upgrades/IL1GenesisUpgrade.sol"; +import {Unauthorized, TooMuchGas, PriorityTxPubdataExceedsMaxPubDataPerBatch, InvalidPubdataPricingMode, ProtocolIdMismatch, ChainAlreadyLive, HashMismatch, ProtocolIdNotGreater, DenominatorIsZero, DiamondAlreadyFrozen, DiamondNotFrozen} from "../../../common/L1ContractErrors.sol"; 
// While formally the following import is not used, it is needed to inherit documentation from it -import {IZkSyncHyperchainBase} from "../../chain-interfaces/IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "../../chain-interfaces/IZKChainBase.sol"; /// @title Admin Contract controls access rights for contract management. /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -contract AdminFacet is ZkSyncHyperchainBase, IAdmin { - /// @inheritdoc IZkSyncHyperchainBase +contract AdminFacet is ZKChainBase, IAdmin { + using PriorityTree for PriorityTree.Tree; + using PriorityQueue for PriorityQueue.Queue; + + /// @inheritdoc IZKChainBase string public constant override getName = "AdminFacet"; + /// @notice The chain id of L1. This contract can be deployed on multiple layers, but this value is still equal to the + /// L1 that is at the most base layer. + uint256 internal immutable L1_CHAIN_ID; + + constructor(uint256 _l1ChainId) { + L1_CHAIN_ID = _l1ChainId; + } + + modifier onlyL1() { + require(block.chainid == L1_CHAIN_ID, "AdminFacet: not L1"); + _; + } + /// @inheritdoc IAdmin function setPendingAdmin(address _newPendingAdmin) external onlyAdmin { // Save previous value into the stack to put it into the event later @@ -31,7 +53,10 @@ contract AdminFacet is ZkSyncHyperchainBase, IAdmin { /// @inheritdoc IAdmin function acceptAdmin() external { address pendingAdmin = s.pendingAdmin; - require(msg.sender == pendingAdmin, "n4"); // Only proposed by current admin address can claim the admin rights + // Only proposed by current admin address can claim the admin rights + if (msg.sender != pendingAdmin) { + revert Unauthorized(msg.sender); + } address previousAdmin = s.admin; s.admin = pendingAdmin; @@ -42,21 +67,23 @@ contract AdminFacet is ZkSyncHyperchainBase, IAdmin { } /// @inheritdoc IAdmin - function setValidator(address _validator, bool _active) external onlyStateTransitionManager { + function setValidator(address _validator, bool 
_active) external onlyChainTypeManager { s.validators[_validator] = _active; emit ValidatorStatusUpdate(_validator, _active); } /// @inheritdoc IAdmin - function setPorterAvailability(bool _zkPorterIsAvailable) external onlyStateTransitionManager { + function setPorterAvailability(bool _zkPorterIsAvailable) external onlyChainTypeManager { // Change the porter availability s.zkPorterIsAvailable = _zkPorterIsAvailable; emit IsPorterAvailableStatusUpdate(_zkPorterIsAvailable); } /// @inheritdoc IAdmin - function setPriorityTxMaxGasLimit(uint256 _newPriorityTxMaxGasLimit) external onlyStateTransitionManager { - require(_newPriorityTxMaxGasLimit <= MAX_GAS_PER_TRANSACTION, "n5"); + function setPriorityTxMaxGasLimit(uint256 _newPriorityTxMaxGasLimit) external onlyChainTypeManager { + if (_newPriorityTxMaxGasLimit > MAX_GAS_PER_TRANSACTION) { + revert TooMuchGas(); + } uint256 oldPriorityTxMaxGasLimit = s.priorityTxMaxGasLimit; s.priorityTxMaxGasLimit = _newPriorityTxMaxGasLimit; @@ -64,14 +91,19 @@ contract AdminFacet is ZkSyncHyperchainBase, IAdmin { } /// @inheritdoc IAdmin - function changeFeeParams(FeeParams calldata _newFeeParams) external onlyAdminOrStateTransitionManager { + function changeFeeParams(FeeParams calldata _newFeeParams) external onlyAdminOrChainTypeManager onlyL1 { // Double checking that the new fee params are valid, i.e. // the maximal pubdata per batch is not less than the maximal pubdata per priority transaction. 
- require(_newFeeParams.maxPubdataPerBatch >= _newFeeParams.priorityTxMaxPubdata, "n6"); + if (_newFeeParams.maxPubdataPerBatch < _newFeeParams.priorityTxMaxPubdata) { + revert PriorityTxPubdataExceedsMaxPubDataPerBatch(); + } FeeParams memory oldFeeParams = s.feeParams; - require(_newFeeParams.pubdataPricingMode == oldFeeParams.pubdataPricingMode, "n7"); // we cannot change pubdata pricing mode + // we cannot change pubdata pricing mode + if (_newFeeParams.pubdataPricingMode != oldFeeParams.pubdataPricingMode) { + revert InvalidPubdataPricingMode(); + } s.feeParams = _newFeeParams; @@ -79,8 +111,10 @@ contract AdminFacet is ZkSyncHyperchainBase, IAdmin { } /// @inheritdoc IAdmin - function setTokenMultiplier(uint128 _nominator, uint128 _denominator) external onlyAdminOrStateTransitionManager { - require(_denominator != 0, "AF: denominator 0"); + function setTokenMultiplier(uint128 _nominator, uint128 _denominator) external onlyAdminOrChainTypeManager { + if (_denominator == 0) { + revert DenominatorIsZero(); + } uint128 oldNominator = s.baseTokenGasPriceMultiplierNominator; uint128 oldDenominator = s.baseTokenGasPriceMultiplierDenominator; @@ -91,18 +125,41 @@ contract AdminFacet is ZkSyncHyperchainBase, IAdmin { } /// @inheritdoc IAdmin - function setPubdataPricingMode(PubdataPricingMode _pricingMode) external onlyAdmin { - require(s.totalBatchesCommitted == 0, "AdminFacet: set validium only after genesis"); // Validium mode can be set only before the first batch is processed + function setPubdataPricingMode(PubdataPricingMode _pricingMode) external onlyAdmin onlyL1 { + // Validium mode can be set only before the first batch is processed + if (s.totalBatchesCommitted != 0) { + revert ChainAlreadyLive(); + } s.feeParams.pubdataPricingMode = _pricingMode; emit ValidiumModeStatusUpdate(_pricingMode); } - function setTransactionFilterer(address _transactionFilterer) external onlyAdmin { + /// @inheritdoc IAdmin + function setTransactionFilterer(address 
_transactionFilterer) external onlyAdmin onlyL1 { address oldTransactionFilterer = s.transactionFilterer; s.transactionFilterer = _transactionFilterer; emit NewTransactionFilterer(oldTransactionFilterer, _transactionFilterer); } + /// @notice Sets the DA validator pair with the given addresses. + /// @dev It does not check for these addresses to be non-zero, since when migrating to a new settlement + /// layer, we set them to zero. + function _setDAValidatorPair(address _l1DAValidator, address _l2DAValidator) internal { + emit NewL1DAValidator(s.l1DAValidator, _l1DAValidator); + emit NewL2DAValidator(s.l2DAValidator, _l2DAValidator); + + s.l1DAValidator = _l1DAValidator; + s.l2DAValidator = _l2DAValidator; + } + + /// @inheritdoc IAdmin + function setDAValidatorPair(address _l1DAValidator, address _l2DAValidator) external onlyAdmin { + require(_l1DAValidator != address(0), "AdminFacet: L1DAValidator address is zero"); + require(_l2DAValidator != address(0), "AdminFacet: L2DAValidator address is zero"); + + _setDAValidatorPair(_l1DAValidator, _l2DAValidator); + } + /*////////////////////////////////////////////////////////////// UPGRADE EXECUTION //////////////////////////////////////////////////////////////*/ @@ -111,46 +168,233 @@ contract AdminFacet is ZkSyncHyperchainBase, IAdmin { function upgradeChainFromVersion( uint256 _oldProtocolVersion, Diamond.DiamondCutData calldata _diamondCut - ) external onlyAdminOrStateTransitionManager { + ) external onlyAdminOrChainTypeManager { bytes32 cutHashInput = keccak256(abi.encode(_diamondCut)); - require( - cutHashInput == IStateTransitionManager(s.stateTransitionManager).upgradeCutHash(_oldProtocolVersion), - "AdminFacet: cutHash mismatch" - ); - - require(s.protocolVersion == _oldProtocolVersion, "AdminFacet: protocolVersion mismatch in STC when upgrading"); + bytes32 upgradeCutHash = IChainTypeManager(s.chainTypeManager).upgradeCutHash(_oldProtocolVersion); + if (cutHashInput != upgradeCutHash) { + revert 
HashMismatch(upgradeCutHash, cutHashInput); + } + + if (s.protocolVersion != _oldProtocolVersion) { + revert ProtocolIdMismatch(s.protocolVersion, _oldProtocolVersion); + } Diamond.diamondCut(_diamondCut); emit ExecuteUpgrade(_diamondCut); - require(s.protocolVersion > _oldProtocolVersion, "AdminFacet: protocolVersion mismatch in STC after upgrading"); + if (s.protocolVersion <= _oldProtocolVersion) { + revert ProtocolIdNotGreater(); + } } /// @inheritdoc IAdmin - function executeUpgrade(Diamond.DiamondCutData calldata _diamondCut) external onlyStateTransitionManager { + function executeUpgrade(Diamond.DiamondCutData calldata _diamondCut) external onlyChainTypeManager { Diamond.diamondCut(_diamondCut); emit ExecuteUpgrade(_diamondCut); } + /// @dev we have to set the chainId at genesis, as blockhashzero is the same for all chains with the same chainId + function genesisUpgrade( + address _l1GenesisUpgrade, + address _ctmDeployer, + bytes calldata _forceDeploymentData, + bytes[] calldata _factoryDeps + ) external onlyChainTypeManager { + Diamond.FacetCut[] memory emptyArray; + Diamond.DiamondCutData memory cutData = Diamond.DiamondCutData({ + facetCuts: emptyArray, + initAddress: _l1GenesisUpgrade, + initCalldata: abi.encodeCall( + IL1GenesisUpgrade.genesisUpgrade, + (_l1GenesisUpgrade, s.chainId, s.protocolVersion, _ctmDeployer, _forceDeploymentData, _factoryDeps) + ) + }); + + Diamond.diamondCut(cutData); + } + /*////////////////////////////////////////////////////////////// CONTRACT FREEZING //////////////////////////////////////////////////////////////*/ /// @inheritdoc IAdmin - function freezeDiamond() external onlyStateTransitionManager { + function freezeDiamond() external onlyChainTypeManager { Diamond.DiamondStorage storage diamondStorage = Diamond.getDiamondStorage(); - require(!diamondStorage.isFrozen, "a9"); // diamond proxy is frozen already + // diamond proxy is frozen already + if (diamondStorage.isFrozen) { + revert DiamondAlreadyFrozen(); + } 
diamondStorage.isFrozen = true; emit Freeze(); } /// @inheritdoc IAdmin - function unfreezeDiamond() external onlyStateTransitionManager { + function unfreezeDiamond() external onlyChainTypeManager { Diamond.DiamondStorage storage diamondStorage = Diamond.getDiamondStorage(); - require(diamondStorage.isFrozen, "a7"); // diamond proxy is not frozen + // diamond proxy is not frozen + if (!diamondStorage.isFrozen) { + revert DiamondNotFrozen(); + } diamondStorage.isFrozen = false; emit Unfreeze(); } + + /*////////////////////////////////////////////////////////////// + CHAIN MIGRATION + //////////////////////////////////////////////////////////////*/ + + /// @inheritdoc IAdmin + function forwardedBridgeBurn( + address _settlementLayer, + address _originalCaller, + bytes calldata _data + ) external payable override onlyBridgehub returns (bytes memory chainBridgeMintData) { + require(s.settlementLayer == address(0), "Af: already migrated"); + require(_originalCaller == s.admin, "Af: not chainAdmin"); + // As of now all we need in this function is the chainId so we encode it and pass it down in the _chainData field + uint256 protocolVersion = abi.decode(_data, (uint256)); + + uint256 currentProtocolVersion = s.protocolVersion; + + require(currentProtocolVersion == protocolVersion, "CTM: protocolVersion not up to date"); + + if (block.chainid != L1_CHAIN_ID) { + // We assume that GW -> L1 transactions can never fail and provide no recovery mechanism from it. + // That's why we need to bound the gas that can be consumed during such a migration. 
+ require(s.totalBatchesCommitted == s.totalBatchesExecuted, "Af: not all batches executed"); + } + + s.settlementLayer = _settlementLayer; + chainBridgeMintData = abi.encode(prepareChainCommitment()); + } + + /// @inheritdoc IAdmin + function forwardedBridgeMint( + bytes calldata _data, + bool _contractAlreadyDeployed + ) external payable override onlyBridgehub { + ZKChainCommitment memory _commitment = abi.decode(_data, (ZKChainCommitment)); + + IChainTypeManager ctm = IChainTypeManager(s.chainTypeManager); + + uint256 currentProtocolVersion = s.protocolVersion; + uint256 protocolVersion = ctm.protocolVersion(); + require(currentProtocolVersion == protocolVersion, "CTM: protocolVersion not up to date"); + + uint256 batchesExecuted = _commitment.totalBatchesExecuted; + uint256 batchesVerified = _commitment.totalBatchesVerified; + uint256 batchesCommitted = _commitment.totalBatchesCommitted; + + s.totalBatchesCommitted = batchesCommitted; + s.totalBatchesVerified = batchesVerified; + s.totalBatchesExecuted = batchesExecuted; + + // Some consistency checks just in case. + require(batchesExecuted <= batchesVerified, "Executed is not consistent with verified"); + require(batchesVerified <= batchesCommitted, "Verified is not consistent with committed"); + + // In the worst case, we may need to revert all the committed batches that were not executed. + // This means that the stored batch hashes should be stored for [batchesExecuted; batchesCommitted] batches, i.e. + // there should be batchesCommitted - batchesExecuted + 1 hashes. + require( + _commitment.batchHashes.length == batchesCommitted - batchesExecuted + 1, + "Invalid number of batch hashes" + ); + + // Note that this part is done in O(N), i.e. it is the responsibility of the admin of the chain to ensure that the total number of + // outstanding committed batches is not too long. 
+ uint256 length = _commitment.batchHashes.length; + for (uint256 i = 0; i < length; ++i) { + s.storedBatchHashes[batchesExecuted + i] = _commitment.batchHashes[i]; + } + + if (block.chainid == L1_CHAIN_ID) { + // L1 PTree contains all L1->L2 transactions. + require( + s.priorityTree.isHistoricalRoot( + _commitment.priorityTree.sides[_commitment.priorityTree.sides.length - 1] + ), + "Admin: not historical root" + ); + require(_contractAlreadyDeployed, "Af: contract not deployed"); + require(s.settlementLayer != address(0), "Af: not migrated"); + s.priorityTree.checkL1Reinit(_commitment.priorityTree); + } else if (_contractAlreadyDeployed) { + require(s.settlementLayer != address(0), "Af: not migrated 2"); + s.priorityTree.checkGWReinit(_commitment.priorityTree); + s.priorityTree.initFromCommitment(_commitment.priorityTree); + } else { + s.priorityTree.initFromCommitment(_commitment.priorityTree); + } + + s.l2SystemContractsUpgradeTxHash = _commitment.l2SystemContractsUpgradeTxHash; + s.l2SystemContractsUpgradeBatchNumber = _commitment.l2SystemContractsUpgradeBatchNumber; + + // Set the settlement to 0 - as this is the current settlement chain. + s.settlementLayer = address(0); + + _setDAValidatorPair(address(0), address(0)); + + emit MigrationComplete(); + } + + /// @inheritdoc IAdmin + /// @dev Note that this function does not check that the caller is the chain admin. + function forwardedBridgeRecoverFailedTransfer( + uint256 /* _chainId */, + bytes32 /* _assetInfo */, + address _depositSender, + bytes calldata _chainData + ) external payable override onlyBridgehub { + // As of now all we need in this function is the chainId so we encode it and pass it down in the _chainData field + uint256 protocolVersion = abi.decode(_chainData, (uint256)); + + require(s.settlementLayer != address(0), "Af: not migrated"); + // Sanity check that the _depositSender is the chain admin. 
+ require(_depositSender == s.admin, "Af: not chainAdmin"); + + uint256 currentProtocolVersion = s.protocolVersion; + + require(currentProtocolVersion == protocolVersion, "CTM: protocolVersion not up to date"); + + s.settlementLayer = address(0); + } + + /// @notice Returns the commitment for a chain. + /// @dev Note, that this is a getter method helpful for debugging and should not be relied upon by clients. + /// @return commitment The commitment for the chain. + function prepareChainCommitment() public view returns (ZKChainCommitment memory commitment) { + require(s.priorityQueue.getFirstUnprocessedPriorityTx() >= s.priorityTree.startIndex, "PQ not ready"); + + commitment.totalBatchesCommitted = s.totalBatchesCommitted; + commitment.totalBatchesVerified = s.totalBatchesVerified; + commitment.totalBatchesExecuted = s.totalBatchesExecuted; + commitment.l2SystemContractsUpgradeBatchNumber = s.l2SystemContractsUpgradeBatchNumber; + commitment.l2SystemContractsUpgradeTxHash = s.l2SystemContractsUpgradeTxHash; + commitment.priorityTree = s.priorityTree.getCommitment(); + + // just in case + require( + commitment.totalBatchesExecuted <= commitment.totalBatchesVerified, + "Verified is not consistent with executed" + ); + require( + commitment.totalBatchesVerified <= commitment.totalBatchesCommitted, + "Verified is not consistent with committed" + ); + + uint256 blocksToRemember = commitment.totalBatchesCommitted - commitment.totalBatchesExecuted + 1; + + bytes32[] memory batchHashes = new bytes32[](blocksToRemember); + + for (uint256 i = 0; i < blocksToRemember; ++i) { + unchecked { + batchHashes[i] = s.storedBatchHashes[commitment.totalBatchesExecuted + i]; + } + } + + commitment.batchHashes = batchHashes; + } } diff --git a/l1-contracts/contracts/state-transition/chain-deps/facets/Executor.sol b/l1-contracts/contracts/state-transition/chain-deps/facets/Executor.sol index cb43a5880..87c051d19 100644 --- 
a/l1-contracts/contracts/state-transition/chain-deps/facets/Executor.sol +++ b/l1-contracts/contracts/state-transition/chain-deps/facets/Executor.sol @@ -2,82 +2,79 @@ pragma solidity 0.8.24; -import {ZkSyncHyperchainBase} from "./ZkSyncHyperchainBase.sol"; -import {COMMIT_TIMESTAMP_NOT_OLDER, COMMIT_TIMESTAMP_APPROXIMATION_DELTA, EMPTY_STRING_KECCAK, L2_TO_L1_LOG_SERIALIZE_SIZE, MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, PACKED_L2_BLOCK_TIMESTAMP_MASK, PUBLIC_INPUT_SHIFT, POINT_EVALUATION_PRECOMPILE_ADDR} from "../../../common/Config.sol"; -import {IExecutor, L2_LOG_ADDRESS_OFFSET, L2_LOG_KEY_OFFSET, L2_LOG_VALUE_OFFSET, SystemLogKey, LogProcessingOutput, PubdataSource, BLS_MODULUS, PUBDATA_COMMITMENT_SIZE, PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET, PUBDATA_COMMITMENT_COMMITMENT_OFFSET, MAX_NUMBER_OF_BLOBS, TOTAL_BLOBS_IN_COMMITMENT, BLOB_SIZE_BYTES} from "../../chain-interfaces/IExecutor.sol"; +// solhint-disable gas-custom-errors, reason-string + +import {ZKChainBase} from "./ZKChainBase.sol"; +import {IBridgehub} from "../../../bridgehub/IBridgehub.sol"; +import {IMessageRoot} from "../../../bridgehub/IMessageRoot.sol"; +import {COMMIT_TIMESTAMP_NOT_OLDER, COMMIT_TIMESTAMP_APPROXIMATION_DELTA, EMPTY_STRING_KECCAK, L2_TO_L1_LOG_SERIALIZE_SIZE, MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, PACKED_L2_BLOCK_TIMESTAMP_MASK, PUBLIC_INPUT_SHIFT} from "../../../common/Config.sol"; +import {IExecutor, L2_LOG_ADDRESS_OFFSET, L2_LOG_KEY_OFFSET, L2_LOG_VALUE_OFFSET, SystemLogKey, LogProcessingOutput, TOTAL_BLOBS_IN_COMMITMENT} from "../../chain-interfaces/IExecutor.sol"; import {PriorityQueue, PriorityOperation} from "../../libraries/PriorityQueue.sol"; +import {BatchDecoder} from "../../libraries/BatchDecoder.sol"; import {UncheckedMath} from "../../../common/libraries/UncheckedMath.sol"; import {UnsafeBytes} from "../../../common/libraries/UnsafeBytes.sol"; -import {L2_BOOTLOADER_ADDRESS, L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, 
L2_PUBDATA_CHUNK_PUBLISHER_ADDR} from "../../../common/L2ContractAddresses.sol"; -import {PubdataPricingMode} from "../ZkSyncHyperchainStorage.sol"; -import {IStateTransitionManager} from "../../IStateTransitionManager.sol"; +import {L2_BOOTLOADER_ADDRESS, L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR} from "../../../common/L2ContractAddresses.sol"; +import {IChainTypeManager} from "../../IChainTypeManager.sol"; +import {PriorityTree, PriorityOpsBatchInfo} from "../../libraries/PriorityTree.sol"; +import {IL1DAValidator, L1DAValidatorOutput} from "../../chain-interfaces/IL1DAValidator.sol"; +import {MissingSystemLogs, BatchNumberMismatch, TimeNotReached, ValueMismatch, HashMismatch, NonIncreasingTimestamp, TimestampError, InvalidLogSender, TxHashMismatch, UnexpectedSystemLog, LogAlreadyProcessed, InvalidProtocolVersion, CanOnlyProcessOneBatch, BatchHashMismatch, UpgradeBatchNumberIsNotZero, NonSequentialBatch, CantExecuteUnprovenBatches, SystemLogsSizeTooBig, InvalidNumberOfBlobs, VerifiedBatchesExceedsCommittedBatches, InvalidProof, RevertedBatchNotAfterNewLastBatch, CantRevertExecutedBatch, L2TimestampTooBig, PriorityOperationsRollingHashMismatch, DelegateCallFailed} from "../../../common/L1ContractErrors.sol"; // While formally the following import is not used, it is needed to inherit documentation from it -import {IZkSyncHyperchainBase} from "../../chain-interfaces/IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "../../chain-interfaces/IZKChainBase.sol"; -/// @title zkSync hyperchain Executor contract capable of processing events emitted in the zkSync hyperchain protocol. +/// @title ZK chain Executor contract capable of processing events emitted in the ZK chain protocol. 
/// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { +contract ExecutorFacet is ZKChainBase, IExecutor { using UncheckedMath for uint256; using PriorityQueue for PriorityQueue.Queue; + using PriorityTree for PriorityTree.Tree; - /// @inheritdoc IZkSyncHyperchainBase + /// @inheritdoc IZKChainBase string public constant override getName = "ExecutorFacet"; + /// @dev Checks that the chain is connected to the current bridgehub and not migrated away. + modifier chainOnCurrentBridgehub() { + require(s.settlementLayer == address(0), "Chain was migrated"); + _; + } + /// @dev Process one batch commit using the previous batch StoredBatchInfo /// @dev returns new batch StoredBatchInfo /// @notice Does not change storage function _commitOneBatch( StoredBatchInfo memory _previousBatch, - CommitBatchInfo calldata _newBatch, + CommitBatchInfo memory _newBatch, bytes32 _expectedSystemContractUpgradeTxHash - ) internal view returns (StoredBatchInfo memory) { - require(_newBatch.batchNumber == _previousBatch.batchNumber + 1, "f"); // only commit next batch - - uint8 pubdataSource = uint8(bytes1(_newBatch.pubdataCommitments[0])); - PubdataPricingMode pricingMode = s.feeParams.pubdataPricingMode; - require( - pricingMode == PubdataPricingMode.Validium || - pubdataSource == uint8(PubdataSource.Calldata) || - pubdataSource == uint8(PubdataSource.Blob), - "us" - ); + ) internal returns (StoredBatchInfo memory) { + // only commit next batch + if (_newBatch.batchNumber != _previousBatch.batchNumber + 1) { + revert BatchNumberMismatch(_previousBatch.batchNumber + 1, _newBatch.batchNumber); + } - // Check that batch contain all meta information for L2 logs. + // Check that batch contains all meta information for L2 logs. // Get the chained hash of priority transaction hashes.
LogProcessingOutput memory logOutput = _processL2Logs(_newBatch, _expectedSystemContractUpgradeTxHash); - bytes32[] memory blobCommitments = new bytes32[](MAX_NUMBER_OF_BLOBS); - if (pricingMode == PubdataPricingMode.Validium) { - // skipping data validation for validium, we just check that the data is empty - require(_newBatch.pubdataCommitments.length == 1, "EF: v0l"); - for (uint8 i = uint8(SystemLogKey.BLOB_ONE_HASH_KEY); i <= uint8(SystemLogKey.BLOB_SIX_HASH_KEY); i++) { - logOutput.blobHashes[i - uint8(SystemLogKey.BLOB_ONE_HASH_KEY)] = bytes32(0); - } - } else if (pubdataSource == uint8(PubdataSource.Blob)) { - // In this scenario, pubdataCommitments is a list of: opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes)) = 144 bytes - blobCommitments = _verifyBlobInformation(_newBatch.pubdataCommitments[1:], logOutput.blobHashes); - } else if (pubdataSource == uint8(PubdataSource.Calldata)) { - // In this scenario pubdataCommitments is actual pubdata consisting of l2 to l1 logs, l2 to l1 message, compressed smart contract bytecode, and compressed state diffs - require(_newBatch.pubdataCommitments.length <= BLOB_SIZE_BYTES, "cz"); - require( - logOutput.pubdataHash == - keccak256(_newBatch.pubdataCommitments[1:_newBatch.pubdataCommitments.length - 32]), - "wp" - ); - blobCommitments[0] = bytes32( - _newBatch.pubdataCommitments[_newBatch.pubdataCommitments.length - 32:_newBatch - .pubdataCommitments - .length] - ); - } + L1DAValidatorOutput memory daOutput = IL1DAValidator(s.l1DAValidator).checkDA({ + _chainId: s.chainId, + _batchNumber: uint256(_newBatch.batchNumber), + _l2DAValidatorOutputHash: logOutput.l2DAValidatorOutputHash, + _operatorDAInput: _newBatch.operatorDAInput, + _maxBlobsSupported: TOTAL_BLOBS_IN_COMMITMENT + }); - require(_previousBatch.batchHash == logOutput.previousBatchHash, "l"); + if (_previousBatch.batchHash != logOutput.previousBatchHash) { + revert HashMismatch(logOutput.previousBatchHash, 
_previousBatch.batchHash); + } // Check that the priority operation hash in the L2 logs is as expected - require(logOutput.chainedPriorityTxsHash == _newBatch.priorityOperationsHash, "t"); + if (logOutput.chainedPriorityTxsHash != _newBatch.priorityOperationsHash) { + revert HashMismatch(logOutput.chainedPriorityTxsHash, _newBatch.priorityOperationsHash); + } // Check that the number of processed priority operations is as expected - require(logOutput.numberOfLayer1Txs == _newBatch.numberOfLayer1Txs, "ta"); + if (logOutput.numberOfLayer1Txs != _newBatch.numberOfLayer1Txs) { + revert ValueMismatch(logOutput.numberOfLayer1Txs, _newBatch.numberOfLayer1Txs); + } // Check the timestamp of the new batch _verifyBatchTimestamp(logOutput.packedBatchAndL2BlockTimestamp, _newBatch.timestamp, _previousBatch.timestamp); @@ -85,9 +82,9 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { // Create batch commitment for the proof verification bytes32 commitment = _createBatchCommitment( _newBatch, - logOutput.stateDiffHash, - blobCommitments, - logOutput.blobHashes + daOutput.stateDiffHash, + daOutput.blobsOpeningCommitments, + daOutput.blobsLinearHashes ); return @@ -114,11 +111,15 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { ) internal view { // Check that the timestamp that came from the system context is expected uint256 batchTimestamp = _packedBatchAndL2BlockTimestamp >> 128; - require(batchTimestamp == _expectedBatchTimestamp, "tb"); + if (batchTimestamp != _expectedBatchTimestamp) { + revert TimestampError(); + } // While the fact that _previousBatchTimestamp < batchTimestamp is already checked on L2, // we double check it here for clarity - require(_previousBatchTimestamp < batchTimestamp, "h3"); + if (_previousBatchTimestamp >= batchTimestamp) { + revert NonIncreasingTimestamp(); + } uint256 lastL2BlockTimestamp = _packedBatchAndL2BlockTimestamp & PACKED_L2_BLOCK_TIMESTAMP_MASK; @@ -126,8 +127,14 @@ contract ExecutorFacet is 
ZkSyncHyperchainBase, IExecutor { // So here we need to only double check that: // - The timestamp of the batch is not too small. // - The timestamp of the last L2 block is not too big. - require(block.timestamp - COMMIT_TIMESTAMP_NOT_OLDER <= batchTimestamp, "h1"); // New batch timestamp is too small - require(lastL2BlockTimestamp <= block.timestamp + COMMIT_TIMESTAMP_APPROXIMATION_DELTA, "h2"); // The last L2 block timestamp is too big + // New batch timestamp is too small + if (block.timestamp - COMMIT_TIMESTAMP_NOT_OLDER > batchTimestamp) { + revert TimeNotReached(batchTimestamp, block.timestamp - COMMIT_TIMESTAMP_NOT_OLDER); + } + // The last L2 block timestamp is too big + if (lastL2BlockTimestamp > block.timestamp + COMMIT_TIMESTAMP_APPROXIMATION_DELTA) { + revert L2TimestampTooBig(); + } } /// @dev Check that L2 logs are proper and batch contain all meta information for them @@ -135,20 +142,19 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { /// SystemLogKey enum in Constants.sol is processed per new batch. /// @dev Data returned from here will be used to form the batch commitment. function _processL2Logs( - CommitBatchInfo calldata _newBatch, + CommitBatchInfo memory _newBatch, bytes32 _expectedSystemContractUpgradeTxHash - ) internal pure returns (LogProcessingOutput memory logOutput) { + ) internal view returns (LogProcessingOutput memory logOutput) { // Copy L2 to L1 logs into memory. bytes memory emittedL2Logs = _newBatch.systemLogs; - logOutput.blobHashes = new bytes32[](MAX_NUMBER_OF_BLOBS); - // Used as bitmap to set/check log processing happens exactly once. // See SystemLogKey enum in Constants.sol for ordering. 
- uint256 processedLogs; + uint256 processedLogs = 0; // linear traversal of the logs - for (uint256 i = 0; i < emittedL2Logs.length; i = i.uncheckedAdd(L2_TO_L1_LOG_SERIALIZE_SIZE)) { + uint256 logsLength = emittedL2Logs.length; + for (uint256 i = 0; i < logsLength; i = i.uncheckedAdd(L2_TO_L1_LOG_SERIALIZE_SIZE)) { // Extract the values to be compared to/used such as the log sender, key, and value // slither-disable-next-line unused-return (address logSender, ) = UnsafeBytes.readAddress(emittedL2Logs, i + L2_LOG_ADDRESS_OFFSET); @@ -158,81 +164,78 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { (bytes32 logValue, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + L2_LOG_VALUE_OFFSET); // Ensure that the log hasn't been processed already - require(!_checkBit(processedLogs, uint8(logKey)), "kp"); + if (_checkBit(processedLogs, uint8(logKey))) { + revert LogAlreadyProcessed(uint8(logKey)); + } processedLogs = _setBit(processedLogs, uint8(logKey)); // Need to check that each log was sent by the correct address. 
if (logKey == uint256(SystemLogKey.L2_TO_L1_LOGS_TREE_ROOT_KEY)) { - require(logSender == L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, "lm"); + if (logSender != L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR) { + revert InvalidLogSender(logSender, logKey); + } logOutput.l2LogsTreeRoot = logValue; - } else if (logKey == uint256(SystemLogKey.TOTAL_L2_TO_L1_PUBDATA_KEY)) { - require(logSender == L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, "ln"); - logOutput.pubdataHash = logValue; - } else if (logKey == uint256(SystemLogKey.STATE_DIFF_HASH_KEY)) { - require(logSender == L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, "lb"); - logOutput.stateDiffHash = logValue; } else if (logKey == uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)) { - require(logSender == L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, "sc"); + if (logSender != L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR) { + revert InvalidLogSender(logSender, logKey); + } logOutput.packedBatchAndL2BlockTimestamp = uint256(logValue); } else if (logKey == uint256(SystemLogKey.PREV_BATCH_HASH_KEY)) { - require(logSender == L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, "sv"); + if (logSender != L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR) { + revert InvalidLogSender(logSender, logKey); + } logOutput.previousBatchHash = logValue; } else if (logKey == uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY)) { - require(logSender == L2_BOOTLOADER_ADDRESS, "bl"); + if (logSender != L2_BOOTLOADER_ADDRESS) { + revert InvalidLogSender(logSender, logKey); + } logOutput.chainedPriorityTxsHash = logValue; } else if (logKey == uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY)) { - require(logSender == L2_BOOTLOADER_ADDRESS, "bk"); + if (logSender != L2_BOOTLOADER_ADDRESS) { + revert InvalidLogSender(logSender, logKey); + } logOutput.numberOfLayer1Txs = uint256(logValue); - } else if ( - logKey >= uint256(SystemLogKey.BLOB_ONE_HASH_KEY) && logKey <= uint256(SystemLogKey.BLOB_SIX_HASH_KEY) - ) { - require(logSender == L2_PUBDATA_CHUNK_PUBLISHER_ADDR, "pc"); - uint8 
blobNumber = uint8(logKey) - uint8(SystemLogKey.BLOB_ONE_HASH_KEY); - - // While the fact that `blobNumber` is a valid blob number is implicitly checked by the fact - // that Solidity provides array overflow protection, we still double check it manually in case - // we accidentally put `unchecked` at the top of the loop and generally for better error messages. - require(blobNumber < MAX_NUMBER_OF_BLOBS, "b6"); - logOutput.blobHashes[blobNumber] = logValue; + } else if (logKey == uint256(SystemLogKey.USED_L2_DA_VALIDATOR_ADDRESS_KEY)) { + if (logSender != L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR) { + revert InvalidLogSender(logSender, logKey); + } + require(s.l2DAValidator == address(uint160(uint256(logValue))), "lo"); + } else if (logKey == uint256(SystemLogKey.L2_DA_VALIDATOR_OUTPUT_HASH_KEY)) { + if (logSender != L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR) { + revert InvalidLogSender(logSender, logKey); + } + logOutput.l2DAValidatorOutputHash = logValue; } else if (logKey == uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY)) { - require(logSender == L2_BOOTLOADER_ADDRESS, "bu"); - require(_expectedSystemContractUpgradeTxHash == logValue, "ut"); + if (logSender != L2_BOOTLOADER_ADDRESS) { + revert InvalidLogSender(logSender, logKey); + } + if (_expectedSystemContractUpgradeTxHash != logValue) { + revert TxHashMismatch(); + } } else if (logKey > uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY)) { - revert("ul"); + revert UnexpectedSystemLog(logKey); } } - // We only require 13 logs to be checked, the 14th is if we are expecting a protocol upgrade - // Without the protocol upgrade we expect 13 logs: 2^13 - 1 = 8191 - // With the protocol upgrade we expect 14 logs: 2^14 - 1 = 16383 + // We only require 7 logs to be checked, the 8th is if we are expecting a protocol upgrade + // Without the protocol upgrade we expect 7 logs: 2^7 - 1 = 127 + // With the protocol upgrade we expect 8 logs: 2^8 - 1 = 255 if 
(_expectedSystemContractUpgradeTxHash == bytes32(0)) { - require(processedLogs == 8191, "b7"); - } else { - require(processedLogs == 16383, "b8"); + if (processedLogs != 127) { + revert MissingSystemLogs(127, processedLogs); + } + } else if (processedLogs != 255) { + revert MissingSystemLogs(255, processedLogs); } } - /// @inheritdoc IExecutor - function commitBatches( - StoredBatchInfo memory _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData - ) external nonReentrant onlyValidator { - _commitBatches(_lastCommittedBatchData, _newBatchesData); - } - /// @inheritdoc IExecutor function commitBatchesSharedBridge( uint256, // _chainId - StoredBatchInfo memory _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData - ) external nonReentrant onlyValidator { - _commitBatches(_lastCommittedBatchData, _newBatchesData); - } - - function _commitBatches( - StoredBatchInfo memory _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData - ) internal { + uint256 _processFrom, + uint256 _processTo, + bytes calldata _commitData + ) external nonReentrant onlyValidator chainOnCurrentBridgehub { // check that we have the right protocol version // three comments: // 1. A chain has to keep their protocol version up to date, as processing a block requires the latest or previous protocol version @@ -240,28 +243,39 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { // 2. A chain might become out of sync if it launches while we are in the middle of a protocol upgrade. This would mean they cannot process their genesis upgrade // as their protocolversion would be outdated, and they also cannot process the protocol upgrade tx as they have a pending upgrade. // 3. 
The protocol upgrade is increased in the BaseZkSyncUpgrade, in the executor only the systemContractsUpgradeTxHash is checked - require( - IStateTransitionManager(s.stateTransitionManager).protocolVersionIsActive(s.protocolVersion), - "Executor facet: wrong protocol version" - ); + if (!IChainTypeManager(s.chainTypeManager).protocolVersionIsActive(s.protocolVersion)) { + revert InvalidProtocolVersion(); + } + (StoredBatchInfo memory lastCommittedBatchData, CommitBatchInfo[] memory newBatchesData) = BatchDecoder + .decodeAndCheckCommitData(_commitData, _processFrom, _processTo); // With the new changes for EIP-4844, namely the restriction on number of blobs per block, we only allow for a single batch to be committed at a time. - require(_newBatchesData.length == 1, "e4"); + // Note: Don't need to check that `_processFrom` == `_processTo` because there is only one batch, + // and so the range checked in the `decodeAndCheckCommitData` is enough. + if (newBatchesData.length != 1) { + revert CanOnlyProcessOneBatch(); + } // Check that we commit batches after last committed batch - require(s.storedBatchHashes[s.totalBatchesCommitted] == _hashStoredBatchInfo(_lastCommittedBatchData), "i"); // incorrect previous batch data + if (s.storedBatchHashes[s.totalBatchesCommitted] != _hashStoredBatchInfo(lastCommittedBatchData)) { + // incorrect previous batch data + revert BatchHashMismatch( + s.storedBatchHashes[s.totalBatchesCommitted], + _hashStoredBatchInfo(lastCommittedBatchData) + ); + } bytes32 systemContractsUpgradeTxHash = s.l2SystemContractsUpgradeTxHash; // Upgrades are rarely done so we optimize a case with no active system contracts upgrade. 
if (systemContractsUpgradeTxHash == bytes32(0) || s.l2SystemContractsUpgradeBatchNumber != 0) { - _commitBatchesWithoutSystemContractsUpgrade(_lastCommittedBatchData, _newBatchesData); + _commitBatchesWithoutSystemContractsUpgrade(lastCommittedBatchData, newBatchesData); } else { _commitBatchesWithSystemContractsUpgrade( - _lastCommittedBatchData, - _newBatchesData, + lastCommittedBatchData, + newBatchesData, systemContractsUpgradeTxHash ); } - s.totalBatchesCommitted = s.totalBatchesCommitted + _newBatchesData.length; + s.totalBatchesCommitted = s.totalBatchesCommitted + newBatchesData.length; } /// @dev Commits new batches without any system contracts upgrade. @@ -269,8 +283,10 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { /// @param _newBatchesData An array of batch data that needs to be committed. function _commitBatchesWithoutSystemContractsUpgrade( StoredBatchInfo memory _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData + CommitBatchInfo[] memory _newBatchesData ) internal { + // We disable this check because calldata array length is cheap. + // solhint-disable-next-line gas-length-in-loops for (uint256 i = 0; i < _newBatchesData.length; i = i.uncheckedInc()) { _lastCommittedBatchData = _commitOneBatch(_lastCommittedBatchData, _newBatchesData[i], bytes32(0)); @@ -289,7 +305,7 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { /// @param _systemContractUpgradeTxHash The transaction hash of the system contract upgrade. function _commitBatchesWithSystemContractsUpgrade( StoredBatchInfo memory _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData, + CommitBatchInfo[] memory _newBatchesData, bytes32 _systemContractUpgradeTxHash ) internal { // The system contract upgrade is designed to be executed atomically with the new bootloader, a default account, @@ -297,12 +313,16 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { // carried out within the first batch committed after the upgrade. 
// While the logic of the contract ensures that the s.l2SystemContractsUpgradeBatchNumber is 0 when this function is called, - // this check is added just in case. Since it is a hot read, it does not encure noticeable gas cost. - require(s.l2SystemContractsUpgradeBatchNumber == 0, "ik"); + // this check is added just in case. Since it is a hot read, it does not incur noticeable gas cost. + if (s.l2SystemContractsUpgradeBatchNumber != 0) { + revert UpgradeBatchNumberIsNotZero(); + } // Save the batch number where the upgrade transaction was executed. s.l2SystemContractsUpgradeBatchNumber = _newBatchesData[0].batchNumber; + // We disable this check because calldata array length is cheap. + // solhint-disable-next-line gas-length-in-loops for (uint256 i = 0; i < _newBatchesData.length; i = i.uncheckedInc()) { // The upgrade transaction must only be included in the first batch. bytes32 expectedUpgradeTxHash = i == 0 ? _systemContractUpgradeTxHash : bytes32(0); @@ -331,48 +351,112 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { } } + function _rollingHash(bytes32[] memory _hashes) internal pure returns (bytes32) { + bytes32 hash = EMPTY_STRING_KECCAK; + uint256 nHashes = _hashes.length; + for (uint256 i = 0; i < nHashes; i = i.uncheckedInc()) { + hash = keccak256(abi.encode(hash, _hashes[i])); + } + return hash; + } + + /// @dev Checks that the data of the batch is correct and can be executed + /// @dev Verifies that batch number, batch hash and priority operations hash are correct + function _checkBatchData( + StoredBatchInfo memory _storedBatch, + uint256 _executedBatchIdx, + bytes32 _priorityOperationsHash + ) internal view { + uint256 currentBatchNumber = _storedBatch.batchNumber; + if (currentBatchNumber != s.totalBatchesExecuted + _executedBatchIdx + 1) { + revert NonSequentialBatch(); + } + if (_hashStoredBatchInfo(_storedBatch) != s.storedBatchHashes[currentBatchNumber]) { + revert BatchHashMismatch(s.storedBatchHashes[currentBatchNumber], 
_hashStoredBatchInfo(_storedBatch)); + } + if (_priorityOperationsHash != _storedBatch.priorityOperationsHash) { + revert PriorityOperationsRollingHashMismatch(); + } + } + /// @dev Executes one batch /// @dev 1. Processes all pending operations (Complete priority requests) /// @dev 2. Finalizes batch on Ethereum /// @dev _executedBatchIdx is an index in the array of the batches that we want to execute together function _executeOneBatch(StoredBatchInfo memory _storedBatch, uint256 _executedBatchIdx) internal { - uint256 currentBatchNumber = _storedBatch.batchNumber; - require(currentBatchNumber == s.totalBatchesExecuted + _executedBatchIdx + 1, "k"); // Execute batches in order - require( - _hashStoredBatchInfo(_storedBatch) == s.storedBatchHashes[currentBatchNumber], - "exe10" // executing batch should be committed - ); - bytes32 priorityOperationsHash = _collectOperationsFromPriorityQueue(_storedBatch.numberOfLayer1Txs); - require(priorityOperationsHash == _storedBatch.priorityOperationsHash, "x"); // priority operations hash does not match to expected + _checkBatchData(_storedBatch, _executedBatchIdx, priorityOperationsHash); + + uint256 currentBatchNumber = _storedBatch.batchNumber; // Save root hash of L2 -> L1 logs tree s.l2LogsRootHashes[currentBatchNumber] = _storedBatch.l2LogsTreeRoot; + + // Once the batch is executed, we include its message to the message root. 
+ IMessageRoot messageRootContract = IBridgehub(s.bridgehub).messageRoot(); + messageRootContract.addChainBatchRoot(s.chainId, currentBatchNumber, _storedBatch.l2LogsTreeRoot); + + // IBridgehub bridgehub = IBridgehub(s.bridgehub); + // bridgehub.messageRoot().addChainBatchRoot( + // s.chainId, + // _storedBatch.l2LogsTreeRoot, + // block.chainid != bridgehub.L1_CHAIN_ID() + // ); } - /// @inheritdoc IExecutor - function executeBatchesSharedBridge( - uint256, - StoredBatchInfo[] calldata _batchesData - ) external nonReentrant onlyValidator { - _executeBatches(_batchesData); + /// @notice Executes one batch + /// @dev 1. Processes all pending operations (Complete priority requests) + /// @dev 2. Finalizes batch + /// @dev _executedBatchIdx is an index in the array of the batches that we want to execute together + function _executeOneBatch( + StoredBatchInfo memory _storedBatch, + PriorityOpsBatchInfo memory _priorityOpsData, + uint256 _executedBatchIdx + ) internal { + require(_priorityOpsData.itemHashes.length == _storedBatch.numberOfLayer1Txs, "zxc"); + bytes32 priorityOperationsHash = _rollingHash(_priorityOpsData.itemHashes); + _checkBatchData(_storedBatch, _executedBatchIdx, priorityOperationsHash); + s.priorityTree.processBatch(_priorityOpsData); + + uint256 currentBatchNumber = _storedBatch.batchNumber; + + // Save root hash of L2 -> L1 logs tree + s.l2LogsRootHashes[_storedBatch.batchNumber] = _storedBatch.l2LogsTreeRoot; + + // Once the batch is executed, we include its message to the message root. 
+ IMessageRoot messageRootContract = IBridgehub(s.bridgehub).messageRoot(); + messageRootContract.addChainBatchRoot(s.chainId, currentBatchNumber, _storedBatch.l2LogsTreeRoot); } /// @inheritdoc IExecutor - function executeBatches(StoredBatchInfo[] calldata _batchesData) external nonReentrant onlyValidator { - _executeBatches(_batchesData); - } + function executeBatchesSharedBridge( + uint256, // _chainId + uint256 _processFrom, + uint256 _processTo, + bytes calldata _executeData + ) external nonReentrant onlyValidator chainOnCurrentBridgehub { + (StoredBatchInfo[] memory batchesData, PriorityOpsBatchInfo[] memory priorityOpsData) = BatchDecoder + .decodeAndCheckExecuteData(_executeData, _processFrom, _processTo); + uint256 nBatches = batchesData.length; + require(batchesData.length == priorityOpsData.length, "bp"); - function _executeBatches(StoredBatchInfo[] calldata _batchesData) internal { - uint256 nBatches = _batchesData.length; for (uint256 i = 0; i < nBatches; i = i.uncheckedInc()) { - _executeOneBatch(_batchesData[i], i); - emit BlockExecution(_batchesData[i].batchNumber, _batchesData[i].batchHash, _batchesData[i].commitment); + if (s.priorityTree.startIndex <= s.priorityQueue.getFirstUnprocessedPriorityTx()) { + _executeOneBatch(batchesData[i], priorityOpsData[i], i); + } else { + require(priorityOpsData[i].leftPath.length == 0, "le"); + require(priorityOpsData[i].rightPath.length == 0, "re"); + require(priorityOpsData[i].itemHashes.length == 0, "ih"); + _executeOneBatch(batchesData[i], i); + } + emit BlockExecution(batchesData[i].batchNumber, batchesData[i].batchHash, batchesData[i].commitment); } uint256 newTotalBatchesExecuted = s.totalBatchesExecuted + nBatches; s.totalBatchesExecuted = newTotalBatchesExecuted; - require(newTotalBatchesExecuted <= s.totalBatchesVerified, "n"); // Can't execute batches more than committed and proven currently. 
+ if (newTotalBatchesExecuted > s.totalBatchesVerified) { + revert CantExecuteUnprovenBatches(); + } uint256 batchWhenUpgradeHappened = s.l2SystemContractsUpgradeBatchNumber; if (batchWhenUpgradeHappened != 0 && batchWhenUpgradeHappened <= newTotalBatchesExecuted) { @@ -381,71 +465,69 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { } } - /// @inheritdoc IExecutor - function proveBatches( - StoredBatchInfo calldata _prevBatch, - StoredBatchInfo[] calldata _committedBatches, - ProofInput calldata _proof - ) external nonReentrant onlyValidator { - _proveBatches(_prevBatch, _committedBatches, _proof); - } - /// @inheritdoc IExecutor function proveBatchesSharedBridge( uint256, // _chainId - StoredBatchInfo calldata _prevBatch, - StoredBatchInfo[] calldata _committedBatches, - ProofInput calldata _proof - ) external nonReentrant onlyValidator { - _proveBatches(_prevBatch, _committedBatches, _proof); - } + uint256 _processBatchFrom, + uint256 _processBatchTo, + bytes calldata _proofData + ) external nonReentrant onlyValidator chainOnCurrentBridgehub { + ( + StoredBatchInfo memory prevBatch, + StoredBatchInfo[] memory committedBatches, + uint256[] memory proof + ) = BatchDecoder.decodeAndCheckProofData(_proofData, _processBatchFrom, _processBatchTo); - function _proveBatches( - StoredBatchInfo calldata _prevBatch, - StoredBatchInfo[] calldata _committedBatches, - ProofInput calldata _proof - ) internal { // Save the variables into the stack to save gas on reading them later uint256 currentTotalBatchesVerified = s.totalBatchesVerified; - uint256 committedBatchesLength = _committedBatches.length; + uint256 committedBatchesLength = committedBatches.length; // Initialize the array, that will be used as public input to the ZKP uint256[] memory proofPublicInput = new uint256[](committedBatchesLength); // Check that the batch passed by the validator is indeed the first unverified batch - require(_hashStoredBatchInfo(_prevBatch) == 
s.storedBatchHashes[currentTotalBatchesVerified], "t1"); + if (_hashStoredBatchInfo(prevBatch) != s.storedBatchHashes[currentTotalBatchesVerified]) { + revert BatchHashMismatch(s.storedBatchHashes[currentTotalBatchesVerified], _hashStoredBatchInfo(prevBatch)); + } - bytes32 prevBatchCommitment = _prevBatch.commitment; + bytes32 prevBatchCommitment = prevBatch.commitment; for (uint256 i = 0; i < committedBatchesLength; i = i.uncheckedInc()) { currentTotalBatchesVerified = currentTotalBatchesVerified.uncheckedInc(); - require( - _hashStoredBatchInfo(_committedBatches[i]) == s.storedBatchHashes[currentTotalBatchesVerified], - "o1" - ); + if (_hashStoredBatchInfo(committedBatches[i]) != s.storedBatchHashes[currentTotalBatchesVerified]) { + revert BatchHashMismatch( + s.storedBatchHashes[currentTotalBatchesVerified], + _hashStoredBatchInfo(committedBatches[i]) + ); + } - bytes32 currentBatchCommitment = _committedBatches[i].commitment; + bytes32 currentBatchCommitment = committedBatches[i].commitment; proofPublicInput[i] = _getBatchProofPublicInput(prevBatchCommitment, currentBatchCommitment); prevBatchCommitment = currentBatchCommitment; } - require(currentTotalBatchesVerified <= s.totalBatchesCommitted, "q"); + if (currentTotalBatchesVerified > s.totalBatchesCommitted) { + revert VerifiedBatchesExceedsCommittedBatches(); + } - _verifyProof(proofPublicInput, _proof); + _verifyProof(proofPublicInput, proof); emit BlocksVerification(s.totalBatchesVerified, currentTotalBatchesVerified); s.totalBatchesVerified = currentTotalBatchesVerified; } - function _verifyProof(uint256[] memory proofPublicInput, ProofInput calldata _proof) internal view { + function _verifyProof(uint256[] memory proofPublicInput, uint256[] memory _proof) internal { // We can only process 1 batch proof at a time. 
- require(proofPublicInput.length == 1, "t4"); + if (proofPublicInput.length != 1) { + revert CanOnlyProcessOneBatch(); + } - bool successVerifyProof = s.verifier.verify( - proofPublicInput, - _proof.serializedProof, - _proof.recursiveAggregationInput - ); - require(successVerifyProof, "p"); // Proof verification fail + (bool callSuccess, bytes memory successVerifyProof) = address(s.dualVerifier).delegatecall(abi.encodeWithSelector(s.dualVerifier.verify.selector, proofPublicInput, _proof)); + if(!callSuccess){ + revert DelegateCallFailed(successVerifyProof); + } + if (!abi.decode(successVerifyProof, (bool))) { + revert InvalidProof(); + } } /// @dev Gets zk proof public input @@ -457,19 +539,18 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { uint256(keccak256(abi.encodePacked(_prevBatchCommitment, _currentBatchCommitment))) >> PUBLIC_INPUT_SHIFT; } - /// @inheritdoc IExecutor - function revertBatches(uint256 _newLastBatch) external nonReentrant onlyValidatorOrStateTransitionManager { - _revertBatches(_newLastBatch); - } - /// @inheritdoc IExecutor function revertBatchesSharedBridge(uint256, uint256 _newLastBatch) external nonReentrant onlyValidator { _revertBatches(_newLastBatch); } - function _revertBatches(uint256 _newLastBatch) internal { - require(s.totalBatchesCommitted > _newLastBatch, "v1"); // The last committed batch is less than new last batch - require(_newLastBatch >= s.totalBatchesExecuted, "v2"); // Already executed batches cannot be reverted + function _revertBatches(uint256 _newLastBatch) internal chainOnCurrentBridgehub { + if (s.totalBatchesCommitted <= _newLastBatch) { + revert RevertedBatchNotAfterNewLastBatch(); + } + if (_newLastBatch < s.totalBatchesExecuted) { + revert CantRevertExecutedBatch(); + } if (_newLastBatch < s.totalBatchesVerified) { s.totalBatchesVerified = _newLastBatch; @@ -487,7 +568,7 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { /// @dev Creates batch commitment from its data function 
_createBatchCommitment( - CommitBatchInfo calldata _newBatchData, + CommitBatchInfo memory _newBatchData, bytes32 _stateDiffHash, bytes32[] memory _blobCommitments, bytes32[] memory _blobHashes @@ -501,7 +582,7 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { return keccak256(abi.encode(passThroughDataHash, metadataHash, auxiliaryOutputHash)); } - function _batchPassThroughData(CommitBatchInfo calldata _batch) internal pure returns (bytes memory) { + function _batchPassThroughData(CommitBatchInfo memory _batch) internal pure returns (bytes memory) { return abi.encodePacked( // solhint-disable-next-line func-named-parameters @@ -525,12 +606,14 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { } function _batchAuxiliaryOutput( - CommitBatchInfo calldata _batch, + CommitBatchInfo memory _batch, bytes32 _stateDiffHash, bytes32[] memory _blobCommitments, bytes32[] memory _blobHashes ) internal pure returns (bytes memory) { - require(_batch.systemLogs.length <= MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, "pu"); + if (_batch.systemLogs.length > MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES) { + revert SystemLogsSizeTooBig(); + } bytes32 l2ToL1LogsHash = keccak256(_batch.systemLogs); @@ -555,8 +638,9 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { ) internal pure returns (bytes32[] memory blobAuxOutputWords) { // These invariants should be checked by the caller of this function, but we double check // just in case. 
- require(_blobCommitments.length == MAX_NUMBER_OF_BLOBS, "b10"); - require(_blobHashes.length == MAX_NUMBER_OF_BLOBS, "b11"); + if (_blobCommitments.length != TOTAL_BLOBS_IN_COMMITMENT || _blobHashes.length != TOTAL_BLOBS_IN_COMMITMENT) { + revert InvalidNumberOfBlobs(TOTAL_BLOBS_IN_COMMITMENT, _blobCommitments.length, _blobHashes.length); + } // for each blob we have: // linear hash (hash of preimage from system logs) and @@ -568,7 +652,7 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { blobAuxOutputWords = new bytes32[](2 * TOTAL_BLOBS_IN_COMMITMENT); - for (uint256 i = 0; i < MAX_NUMBER_OF_BLOBS; i++) { + for (uint256 i = 0; i < TOTAL_BLOBS_IN_COMMITMENT; ++i) { blobAuxOutputWords[i * 2] = _blobHashes[i]; blobAuxOutputWords[i * 2 + 1] = _blobCommitments[i]; } @@ -588,85 +672,4 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor { function _setBit(uint256 _bitMap, uint8 _index) internal pure returns (uint256) { return _bitMap | (1 << _index); } - - /// @notice Calls the point evaluation precompile and verifies the output - /// Verify p(z) = y given commitment that corresponds to the polynomial p(x) and a KZG proof. - /// Also verify that the provided commitment matches the provided versioned_hash. 
- /// - function _pointEvaluationPrecompile( - bytes32 _versionedHash, - bytes32 _openingPoint, - bytes calldata _openingValueCommitmentProof - ) internal view { - bytes memory precompileInput = abi.encodePacked(_versionedHash, _openingPoint, _openingValueCommitmentProof); - - (bool success, bytes memory data) = POINT_EVALUATION_PRECOMPILE_ADDR.staticcall(precompileInput); - - // We verify that the point evaluation precompile call was successful by testing the latter 32 bytes of the - // response is equal to BLS_MODULUS as defined in https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile - require(success, "failed to call point evaluation precompile"); - (, uint256 result) = abi.decode(data, (uint256, uint256)); - require(result == BLS_MODULUS, "precompile unexpected output"); - } - - /// @dev Verifies that the blobs contain the correct data by calling the point evaluation precompile. For the precompile we need: - /// versioned hash || opening point || opening value || commitment || proof - /// the _pubdataCommitments will contain the last 4 values, the versioned hash is pulled from the BLOBHASH opcode - /// pubdataCommitments is a list of: opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes)) = 144 bytes - function _verifyBlobInformation( - bytes calldata _pubdataCommitments, - bytes32[] memory _blobHashes - ) internal view returns (bytes32[] memory blobCommitments) { - uint256 versionedHashIndex = 0; - - require(_pubdataCommitments.length > 0, "pl"); - require(_pubdataCommitments.length <= PUBDATA_COMMITMENT_SIZE * MAX_NUMBER_OF_BLOBS, "bd"); - require(_pubdataCommitments.length % PUBDATA_COMMITMENT_SIZE == 0, "bs"); - blobCommitments = new bytes32[](MAX_NUMBER_OF_BLOBS); - - for (uint256 i = 0; i < _pubdataCommitments.length; i += PUBDATA_COMMITMENT_SIZE) { - bytes32 blobVersionedHash = _getBlobVersionedHash(versionedHashIndex); - - require(blobVersionedHash != bytes32(0), "vh"); - - // First 16 bytes is 
the opening point. While we get the point as 16 bytes, the point evaluation precompile - // requires it to be 32 bytes. The blob commitment must use the opening point as 16 bytes though. - bytes32 openingPoint = bytes32( - uint256(uint128(bytes16(_pubdataCommitments[i:i + PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET]))) - ); - - _pointEvaluationPrecompile( - blobVersionedHash, - openingPoint, - _pubdataCommitments[i + PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET:i + PUBDATA_COMMITMENT_SIZE] - ); - - // Take the hash of the versioned hash || opening point || claimed value - blobCommitments[versionedHashIndex] = keccak256( - abi.encodePacked(blobVersionedHash, _pubdataCommitments[i:i + PUBDATA_COMMITMENT_COMMITMENT_OFFSET]) - ); - versionedHashIndex += 1; - } - - // This check is required because we want to ensure that there aren't any extra blobs trying to be published. - // Calling the BLOBHASH opcode with an index > # blobs - 1 yields bytes32(0) - bytes32 versionedHash = _getBlobVersionedHash(versionedHashIndex); - require(versionedHash == bytes32(0), "lh"); - - // We verify that for each set of blobHash/blobCommitment are either both empty - // or there are values for both. 
- for (uint256 i = 0; i < MAX_NUMBER_OF_BLOBS; i++) { - require( - (_blobHashes[i] == bytes32(0) && blobCommitments[i] == bytes32(0)) || - (_blobHashes[i] != bytes32(0) && blobCommitments[i] != bytes32(0)), - "bh" - ); - } - } - - function _getBlobVersionedHash(uint256 _index) internal view virtual returns (bytes32 versionedHash) { - assembly { - versionedHash := blobhash(_index) - } - } } diff --git a/l1-contracts/contracts/state-transition/chain-deps/facets/Getters.sol b/l1-contracts/contracts/state-transition/chain-deps/facets/Getters.sol index ab87d31f0..8aaaca45b 100644 --- a/l1-contracts/contracts/state-transition/chain-deps/facets/Getters.sol +++ b/l1-contracts/contracts/state-transition/chain-deps/facets/Getters.sol @@ -2,29 +2,33 @@ pragma solidity 0.8.24; -import {SafeCast} from "@openzeppelin/contracts/utils/math/SafeCast.sol"; +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; -import {ZkSyncHyperchainBase} from "./ZkSyncHyperchainBase.sol"; -import {PubdataPricingMode} from "../ZkSyncHyperchainStorage.sol"; +import {ZKChainBase} from "./ZKChainBase.sol"; +import {PubdataPricingMode} from "../ZKChainStorage.sol"; import {VerifierParams} from "../../../state-transition/chain-interfaces/IVerifier.sol"; import {Diamond} from "../../libraries/Diamond.sol"; -import {PriorityQueue, PriorityOperation} from "../../../state-transition/libraries/PriorityQueue.sol"; +import {PriorityQueue} from "../../../state-transition/libraries/PriorityQueue.sol"; +import {PriorityTree} from "../../../state-transition/libraries/PriorityTree.sol"; +import {IBridgehub} from "../../../bridgehub/IBridgehub.sol"; import {UncheckedMath} from "../../../common/libraries/UncheckedMath.sol"; import {IGetters} from "../../chain-interfaces/IGetters.sol"; import {ILegacyGetters} from "../../chain-interfaces/ILegacyGetters.sol"; +import {InvalidSelector} from "../../../common/L1ContractErrors.sol"; import {SemVer} from "../../../common/libraries/SemVer.sol"; // 
While formally the following import is not used, it is needed to inherit documentation from it -import {IZkSyncHyperchainBase} from "../../chain-interfaces/IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "../../chain-interfaces/IZKChainBase.sol"; /// @title Getters Contract implements functions for getting contract state from outside the blockchain. /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -contract GettersFacet is ZkSyncHyperchainBase, IGetters, ILegacyGetters { +contract GettersFacet is ZKChainBase, IGetters, ILegacyGetters { using UncheckedMath for uint256; using PriorityQueue for PriorityQueue.Queue; + using PriorityTree for PriorityTree.Tree; - /// @inheritdoc IZkSyncHyperchainBase + /// @inheritdoc IZKChainBase string public constant override getName = "GettersFacet"; /*////////////////////////////////////////////////////////////// @@ -32,8 +36,20 @@ contract GettersFacet is ZkSyncHyperchainBase, IGetters, ILegacyGetters { //////////////////////////////////////////////////////////////*/ /// @inheritdoc IGetters - function getVerifier() external view returns (address) { - return address(s.verifier); + function getDualVerifier() external view returns (address) { + return address(s.dualVerifier); + } + /// @inheritdoc IGetters + function getPlonkVerifier() external view returns (address) { + return s.plonkVerifier; + } + /// @inheritdoc IGetters + function getFflonkVerifier() external view returns (address) { + return s.fflonkVerifier; + } + /// @inheritdoc IGetters + function getFflonkProofLength() external view returns (uint256) { + return s.fflonkProofLength; } /// @inheritdoc IGetters @@ -52,18 +68,23 @@ contract GettersFacet is ZkSyncHyperchainBase, IGetters, ILegacyGetters { } /// @inheritdoc IGetters - function getStateTransitionManager() external view returns (address) { - return s.stateTransitionManager; + function getChainTypeManager() external view returns (address) { + return s.chainTypeManager; + } + + /// 
@inheritdoc IGetters + function getChainId() external view returns (uint256) { + return s.chainId; } /// @inheritdoc IGetters function getBaseToken() external view returns (address) { - return s.baseToken; + return IBridgehub(s.bridgehub).baseToken(s.chainId); } /// @inheritdoc IGetters - function getBaseTokenBridge() external view returns (address) { - return s.baseTokenBridge; + function getBaseTokenAssetId() external view returns (bytes32) { + return s.baseTokenAssetId; } /// @inheritdoc IGetters @@ -91,24 +112,37 @@ contract GettersFacet is ZkSyncHyperchainBase, IGetters, ILegacyGetters { return s.totalBatchesExecuted; } + /// @inheritdoc IGetters + function getTransactionFilterer() external view returns (address) { + return s.transactionFilterer; + } + /// @inheritdoc IGetters function getTotalPriorityTxs() external view returns (uint256) { - return s.priorityQueue.getTotalPriorityTxs(); + return _getTotalPriorityTxs(); } /// @inheritdoc IGetters function getFirstUnprocessedPriorityTx() external view returns (uint256) { - return s.priorityQueue.getFirstUnprocessedPriorityTx(); + if (s.priorityQueue.getFirstUnprocessedPriorityTx() >= s.priorityTree.startIndex) { + return s.priorityTree.getFirstUnprocessedPriorityTx(); + } else { + return s.priorityQueue.getFirstUnprocessedPriorityTx(); + } } /// @inheritdoc IGetters - function getPriorityQueueSize() external view returns (uint256) { - return s.priorityQueue.getSize(); + function getPriorityTreeRoot() external view returns (bytes32) { + return s.priorityTree.getRoot(); } /// @inheritdoc IGetters - function priorityQueueFrontOperation() external view returns (PriorityOperation memory) { - return s.priorityQueue.front(); + function getPriorityQueueSize() external view returns (uint256) { + if (s.priorityQueue.getFirstUnprocessedPriorityTx() >= s.priorityTree.startIndex) { + return s.priorityTree.getSize(); + } else { + return s.priorityQueue.getSize(); + } } /// @inheritdoc IGetters @@ -189,7 +223,9 @@ contract 
GettersFacet is ZkSyncHyperchainBase, IGetters, ILegacyGetters { /// @inheritdoc IGetters function isFunctionFreezable(bytes4 _selector) external view returns (bool) { Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage(); - require(ds.selectorToFacet[_selector].facetAddress != address(0), "g2"); + if (ds.selectorToFacet[_selector].facetAddress == address(0)) { + revert InvalidSelector(_selector); + } return ds.selectorToFacet[_selector].isFreezable; } @@ -203,6 +239,15 @@ contract GettersFacet is ZkSyncHyperchainBase, IGetters, ILegacyGetters { return s.feeParams.pubdataPricingMode; } + /// @inheritdoc IGetters + function getSettlementLayer() external view returns (address) { + return s.settlementLayer; + } + + function getDAValidatorPair() external view returns (address, address) { + return (s.l1DAValidator, s.l2DAValidator); + } + /*////////////////////////////////////////////////////////////// DIAMOND LOUPE //////////////////////////////////////////////////////////////*/ diff --git a/l1-contracts/contracts/state-transition/chain-deps/facets/Mailbox.sol b/l1-contracts/contracts/state-transition/chain-deps/facets/Mailbox.sol index 752913b3f..d230a04b9 100644 --- a/l1-contracts/contracts/state-transition/chain-deps/facets/Mailbox.sol +++ b/l1-contracts/contracts/state-transition/chain-deps/facets/Mailbox.sol @@ -2,54 +2,67 @@ pragma solidity 0.8.24; -import {Math} from "@openzeppelin/contracts/utils/math/Math.sol"; +// solhint-disable gas-custom-errors, reason-string + +import {Math} from "@openzeppelin/contracts-v4/utils/math/Math.sol"; import {IMailbox} from "../../chain-interfaces/IMailbox.sol"; +import {IChainTypeManager} from "../../IChainTypeManager.sol"; +import {IBridgehub} from "../../../bridgehub/IBridgehub.sol"; + import {ITransactionFilterer} from "../../chain-interfaces/ITransactionFilterer.sol"; -import {Merkle} from "../../libraries/Merkle.sol"; +import {Merkle} from "../../../common/libraries/Merkle.sol"; import {PriorityQueue, 
PriorityOperation} from "../../libraries/PriorityQueue.sol"; +import {PriorityTree} from "../../libraries/PriorityTree.sol"; import {TransactionValidator} from "../../libraries/TransactionValidator.sol"; import {WritePriorityOpParams, L2CanonicalTransaction, L2Message, L2Log, TxStatus, BridgehubL2TransactionRequest} from "../../../common/Messaging.sol"; -import {FeeParams, PubdataPricingMode} from "../ZkSyncHyperchainStorage.sol"; +import {MessageHashing} from "../../../common/libraries/MessageHashing.sol"; +import {FeeParams, PubdataPricingMode} from "../ZKChainStorage.sol"; import {UncheckedMath} from "../../../common/libraries/UncheckedMath.sol"; import {L2ContractHelper} from "../../../common/libraries/L2ContractHelper.sol"; import {AddressAliasHelper} from "../../../vendor/AddressAliasHelper.sol"; -import {ZkSyncHyperchainBase} from "./ZkSyncHyperchainBase.sol"; -import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, ETH_TOKEN_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, PRIORITY_OPERATION_L2_TX_TYPE, PRIORITY_EXPIRATION, MAX_NEW_FACTORY_DEPS} from "../../../common/Config.sol"; -import {L2_BOOTLOADER_ADDRESS, L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR} from "../../../common/L2ContractAddresses.sol"; +import {ZKChainBase} from "./ZKChainBase.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, L1_GAS_PER_PUBDATA_BYTE, L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, PRIORITY_OPERATION_L2_TX_TYPE, PRIORITY_EXPIRATION, MAX_NEW_FACTORY_DEPS, SETTLEMENT_LAYER_RELAY_SENDER, SUPPORTED_PROOF_METADATA_VERSION} from "../../../common/Config.sol"; +import {L2_BOOTLOADER_ADDRESS, L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, L2_BRIDGEHUB_ADDR} from "../../../common/L2ContractAddresses.sol"; + +import {IL1AssetRouter} from "../../../bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "../../../bridgehub/IBridgehub.sol"; -import {IL1SharedBridge} from "../../../bridge/interfaces/IL1SharedBridge.sol"; +import {IChainTypeManager} from "../../IChainTypeManager.sol"; +import 
{MerklePathEmpty, OnlyEraSupported, BatchNotExecuted, HashedLogIsDefault, BaseTokenGasPriceDenominatorNotSet, TransactionNotAllowed, GasPerPubdataMismatch, TooManyFactoryDeps, MsgValueTooLow} from "../../../common/L1ContractErrors.sol"; // While formally the following import is not used, it is needed to inherit documentation from it -import {IZkSyncHyperchainBase} from "../../chain-interfaces/IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "../../chain-interfaces/IZKChainBase.sol"; -/// @title zkSync Mailbox contract providing interfaces for L1 <-> L2 interaction. +/// @title ZKsync Mailbox contract providing interfaces for L1 <-> L2 interaction. /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { +contract MailboxFacet is ZKChainBase, IMailbox { using UncheckedMath for uint256; using PriorityQueue for PriorityQueue.Queue; + using PriorityTree for PriorityTree.Tree; - /// @inheritdoc IZkSyncHyperchainBase + /// @inheritdoc IZKChainBase string public constant override getName = "MailboxFacet"; /// @dev Era's chainID - uint256 immutable ERA_CHAIN_ID; + uint256 internal immutable ERA_CHAIN_ID; - constructor(uint256 _eraChainId) { - ERA_CHAIN_ID = _eraChainId; - } + /// @notice The chain id of L1. This contract can be deployed on multiple layers, but this value is still equal to the + /// L1 that is at the most base layer. 
+ uint256 internal immutable L1_CHAIN_ID; - /// @inheritdoc IMailbox - function transferEthToSharedBridge() external onlyBaseTokenBridge { - require(s.chainId == ERA_CHAIN_ID, "Mailbox: transferEthToSharedBridge only available for Era on mailbox"); + modifier onlyL1() { + require(block.chainid == L1_CHAIN_ID, "MailboxFacet: not L1"); + _; + } - uint256 amount = address(this).balance; - address baseTokenBridgeAddress = s.baseTokenBridge; - IL1SharedBridge(baseTokenBridgeAddress).receiveEth{value: amount}(ERA_CHAIN_ID); + constructor(uint256 _eraChainId, uint256 _l1ChainId) { + ERA_CHAIN_ID = _eraChainId; + L1_CHAIN_ID = _l1ChainId; } - /// @notice when requesting transactions through the bridgehub + /// @inheritdoc IMailbox function bridgehubRequestL2Transaction( BridgehubL2TransactionRequest calldata _request ) external onlyBridgehub returns (bytes32 canonicalTxHash) { @@ -60,7 +73,7 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { function proveL2MessageInclusion( uint256 _batchNumber, uint256 _index, - L2Message memory _message, + L2Message calldata _message, bytes32[] calldata _proof ) public view returns (bool) { return _proveL2LogInclusion(_batchNumber, _index, _L2MessageToLog(_message), _proof); @@ -70,7 +83,7 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { function proveL2LogInclusion( uint256 _batchNumber, uint256 _index, - L2Log memory _log, + L2Log calldata _log, bytes32[] calldata _proof ) external view returns (bool) { return _proveL2LogInclusion(_batchNumber, _index, _log, _proof); @@ -106,6 +119,174 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { return _proveL2LogInclusion(_l2BatchNumber, _l2MessageIndex, l2Log, _merkleProof); } + // /// @inheritdoc IMailbox + function proveL1ToL2TransactionStatusViaGateway( + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] calldata _merkleProof, + TxStatus _status + ) public view returns (bool) {} + + function 
_parseProofMetadata( + bytes32[] calldata _proof + ) internal pure returns (uint256 proofStartIndex, uint256 logLeafProofLen, uint256 batchLeafProofLen) { + bytes32 proofMetadata = _proof[0]; + + // We support two formats of the proofs: + // 1. The old format, where `_proof` is just a plain Merkle proof. + // 2. The new format, where the first element of the `_proof` is encoded metadata, which consists of the following: + // - first byte: metadata version (0x01). + // - second byte: length of the log leaf proof (the proof that the log belongs to a batch). + // - third byte: length of the batch leaf proof (the proof that the batch belongs to another settlement layer, if any). + // - the rest of the bytes are zeroes. + // + // In the future the old version will be disabled, and only the new version will be supported. + // For now, we need to support both for backwards compatibility. We distinguish between those based on whether the last 29 bytes are zeroes. + // It is safe, since the elements of the proof are hashes and are unlikely to have 29 zero bytes in them. + + // We shift left by 3 bytes = 24 bits to remove the top 24 bits of the metadata. 
+ uint256 metadataAsUint256 = (uint256(proofMetadata) << 24); + + if (metadataAsUint256 == 0) { + // It is the new version + bytes1 metadataVersion = bytes1(proofMetadata); + require( + uint256(uint8(metadataVersion)) == SUPPORTED_PROOF_METADATA_VERSION, + "Mailbox: unsupported proof metadata version" + ); + + proofStartIndex = 1; + logLeafProofLen = uint256(uint8(proofMetadata[1])); + batchLeafProofLen = uint256(uint8(proofMetadata[2])); + } else { + // It is the old version + + // The entire proof is a merkle path + proofStartIndex = 0; + logLeafProofLen = _proof.length; + batchLeafProofLen = 0; + } + } + + function extractSlice( + bytes32[] calldata _proof, + uint256 _left, + uint256 _right + ) internal pure returns (bytes32[] memory slice) { + slice = new bytes32[](_right - _left); + for (uint256 i = _left; i < _right; i = i.uncheckedInc()) { + slice[i - _left] = _proof[i]; + } + } + + /// @notice Extracts slice until the end of the array. + /// @dev It is used in one place in order to circumvent the stack too deep error. 
+ function extractSliceUntilEnd( + bytes32[] calldata _proof, + uint256 _start + ) internal pure returns (bytes32[] memory slice) { + slice = extractSlice(_proof, _start, _proof.length); + } + + /// @inheritdoc IMailbox + function proveL2LeafInclusion( + uint256 _batchNumber, + uint256 _leafProofMask, + bytes32 _leaf, + bytes32[] calldata _proof + ) external view override returns (bool) { + return _proveL2LeafInclusion(_batchNumber, _leafProofMask, _leaf, _proof); + } + + function _proveL2LeafInclusion( + uint256 _batchNumber, + uint256 _leafProofMask, + bytes32 _leaf, + bytes32[] calldata _proof + ) internal view returns (bool) { + if (_proof.length == 0) { + revert MerklePathEmpty(); + } + + uint256 ptr = 0; + bytes32 chainIdLeaf; + { + (uint256 proofStartIndex, uint256 logLeafProofLen, uint256 batchLeafProofLen) = _parseProofMetadata(_proof); + ptr = proofStartIndex; + + bytes32 batchSettlementRoot = Merkle.calculateRootMemory( + extractSlice(_proof, ptr, ptr + logLeafProofLen), + _leafProofMask, + _leaf + ); + ptr += logLeafProofLen; + + // If the `batchLeafProofLen` is 0, then we assume that this is L1 contract of the top-level + // in the aggregation, i.e. the batch root is stored here on L1. + if (batchLeafProofLen == 0) { + // Double checking that the batch has been executed. + if (_batchNumber > s.totalBatchesExecuted) { + revert BatchNotExecuted(_batchNumber); + } + + bytes32 correctBatchRoot = s.l2LogsRootHashes[_batchNumber]; + require(correctBatchRoot != bytes32(0), "local root is 0"); + return correctBatchRoot == batchSettlementRoot; + } + + require(s.l2LogsRootHashes[_batchNumber] == bytes32(0), "local root must be 0"); + + // Now, we'll have to check that the Gateway included the message. 
+ bytes32 batchLeafHash = MessageHashing.batchLeafHash(batchSettlementRoot, _batchNumber); + + uint256 batchLeafProofMask = uint256(bytes32(_proof[ptr])); + ++ptr; + + bytes32 chainIdRoot = Merkle.calculateRootMemory( + extractSlice(_proof, ptr, ptr + batchLeafProofLen), + batchLeafProofMask, + batchLeafHash + ); + ptr += batchLeafProofLen; + + chainIdLeaf = MessageHashing.chainIdLeafHash(chainIdRoot, s.chainId); + } + + uint256 settlementLayerBatchNumber; + uint256 settlementLayerBatchRootMask; + address settlementLayerAddress; + + // Preventing stack too deep error + { + // Now, we just need to double check whether this chainId leaf was present in the tree. + uint256 settlementLayerPackedBatchInfo = uint256(_proof[ptr]); + ++ptr; + settlementLayerBatchNumber = uint256(settlementLayerPackedBatchInfo >> 128); + settlementLayerBatchRootMask = uint256(settlementLayerPackedBatchInfo & ((1 << 128) - 1)); + + uint256 settlementLayerChainId = uint256(_proof[ptr]); + ++ptr; + + // Assuming that `settlementLayerChainId` is an honest chain, the `chainIdLeaf` should belong + // to a chain's message root only if the chain has indeed executed its batch on top of it. + // + // We trust all chains whitelisted by the Bridgehub governance. 
+ require(IBridgehub(s.bridgehub).whitelistedSettlementLayers(settlementLayerChainId), "Mailbox: wrong CTM"); + + settlementLayerAddress = IBridgehub(s.bridgehub).getZKChain(settlementLayerChainId); + } + + return + IMailbox(settlementLayerAddress).proveL2LeafInclusion( + settlementLayerBatchNumber, + settlementLayerBatchRootMask, + chainIdLeaf, + extractSliceUntilEnd(_proof, ptr) + ); + } + /// @dev Prove that a specific L2 log was sent in a specific L2 batch number function _proveL2LogInclusion( uint256 _batchNumber, @@ -113,28 +294,27 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { L2Log memory _log, bytes32[] calldata _proof ) internal view returns (bool) { - require(_batchNumber <= s.totalBatchesExecuted, "xx"); - bytes32 hashedLog = keccak256( // solhint-disable-next-line func-named-parameters abi.encodePacked(_log.l2ShardId, _log.isService, _log.txNumberInBatch, _log.sender, _log.key, _log.value) ); // Check that hashed log is not the default one, // otherwise it means that the value is out of range of sent L2 -> L1 logs - require(hashedLog != L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, "tw"); + if (hashedLog == L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH) { + revert HashedLogIsDefault(); + } // It is ok to not check length of `_proof` array, as length // of leaf preimage (which is `L2_TO_L1_LOG_SERIALIZE_SIZE`) is not // equal to the length of other nodes preimages (which are `2 * 32`) - bytes32 calculatedRootHash = Merkle.calculateRoot(_proof, _index, hashedLog); - bytes32 actualRootHash = s.l2LogsRootHashes[_batchNumber]; + // We can use `index` as a mask, since the `localMessageRoot` is on the left part of the tree. 
- return actualRootHash == calculatedRootHash; + return _proveL2LeafInclusion(_batchNumber, _index, hashedLog, _proof); } /// @dev Convert arbitrary-length message to the raw l2 log - function _L2MessageToLog(L2Message memory _message) internal pure returns (L2Log memory) { + function _L2MessageToLog(L2Message calldata _message) internal pure returns (L2Log memory) { return L2Log({ l2ShardId: 0, @@ -162,7 +342,9 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { /// @return The price of L2 gas in the base token function _deriveL2GasPrice(uint256 _l1GasPrice, uint256 _gasPerPubdata) internal view returns (uint256) { FeeParams memory feeParams = s.feeParams; - require(s.baseTokenGasPriceMultiplierDenominator > 0, "Mailbox: baseTokenGasPriceDenominator not set"); + if (s.baseTokenGasPriceMultiplierDenominator == 0) { + revert BaseTokenGasPriceDenominatorNotSet(); + } uint256 l1GasPriceConverted = (_l1GasPrice * s.baseTokenGasPriceMultiplierNominator) / s.baseTokenGasPriceMultiplierDenominator; uint256 pubdataPriceBaseToken; @@ -184,54 +366,56 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { } /// @inheritdoc IMailbox - function finalizeEthWithdrawal( - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external nonReentrant { - require(s.chainId == ERA_CHAIN_ID, "Mailbox: finalizeEthWithdrawal only available for Era on mailbox"); - IL1SharedBridge(s.baseTokenBridge).finalizeWithdrawal({ - _chainId: ERA_CHAIN_ID, - _l2BatchNumber: _l2BatchNumber, - _l2MessageIndex: _l2MessageIndex, - _l2TxNumberInBatch: _l2TxNumberInBatch, - _message: _message, - _merkleProof: _merkleProof + function requestL2TransactionToGatewayMailbox( + uint256 _chainId, + bytes32 _canonicalTxHash, + uint64 _expirationTimestamp + ) external override onlyL1 returns (bytes32 canonicalTxHash) { + require(IBridgehub(s.bridgehub).whitelistedSettlementLayers(s.chainId), "Mailbox SL: not 
SL"); + require(IChainTypeManager(s.chainTypeManager).getZKChain(_chainId) == msg.sender, "Mailbox SL: not zkChain"); + + BridgehubL2TransactionRequest memory wrappedRequest = _wrapRequest({ + _chainId: _chainId, + _canonicalTxHash: _canonicalTxHash, + _expirationTimestamp: _expirationTimestamp }); + canonicalTxHash = _requestL2TransactionToGatewayFree(wrappedRequest); } - /// @inheritdoc IMailbox - function requestL2Transaction( - address _contractL2, - uint256 _l2Value, - bytes calldata _calldata, - uint256 _l2GasLimit, - uint256 _l2GasPerPubdataByteLimit, - bytes[] calldata _factoryDeps, - address _refundRecipient - ) external payable returns (bytes32 canonicalTxHash) { - require(s.chainId == ERA_CHAIN_ID, "Mailbox: legacy interface only available for Era"); - canonicalTxHash = _requestL2TransactionSender( - BridgehubL2TransactionRequest({ - sender: msg.sender, - contractL2: _contractL2, - mintValue: msg.value, - l2Value: _l2Value, - l2GasLimit: _l2GasLimit, - l2Calldata: _calldata, - l2GasPerPubdataByteLimit: _l2GasPerPubdataByteLimit, - factoryDeps: _factoryDeps, - refundRecipient: _refundRecipient - }) - ); - IL1SharedBridge(s.baseTokenBridge).bridgehubDepositBaseToken{value: msg.value}( - s.chainId, - msg.sender, - ETH_TOKEN_ADDRESS, - msg.value + /// @inheritdoc IMailbox + function bridgehubRequestL2TransactionOnGateway( + bytes32 _canonicalTxHash, + uint64 _expirationTimestamp + ) external override onlyBridgehub { + _writePriorityOpHash(_canonicalTxHash, _expirationTimestamp); + emit NewRelayedPriorityTransaction(_getTotalPriorityTxs(), _canonicalTxHash, _expirationTimestamp); + } + + function _wrapRequest( + uint256 _chainId, + bytes32 _canonicalTxHash, + uint64 _expirationTimestamp + ) internal view returns (BridgehubL2TransactionRequest memory) { + // solhint-disable-next-line func-named-parameters + bytes memory data = abi.encodeCall( + IBridgehub(s.bridgehub).forwardTransactionOnGateway, + (_chainId, _canonicalTxHash, _expirationTimestamp) ); + return 
+ BridgehubL2TransactionRequest({ + /// There is no sender for the wrapping, we use a virtual address. + sender: SETTLEMENT_LAYER_RELAY_SENDER, + contractL2: L2_BRIDGEHUB_ADDR, + mintValue: 0, + l2Value: 0, + // Very large amount + l2GasLimit: 72_000_000, + l2Calldata: data, + l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + factoryDeps: new bytes[](0), + // Tx is free, no so refund recipient needed + refundRecipient: address(0) + }); } function _requestL2TransactionSender( @@ -239,17 +423,18 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { ) internal nonReentrant returns (bytes32 canonicalTxHash) { // Check that the transaction is allowed by the filterer (if the filterer is set). if (s.transactionFilterer != address(0)) { - require( - ITransactionFilterer(s.transactionFilterer).isTransactionAllowed({ + if ( + !ITransactionFilterer(s.transactionFilterer).isTransactionAllowed({ sender: _request.sender, contractL2: _request.contractL2, mintValue: _request.mintValue, l2Value: _request.l2Value, l2Calldata: _request.l2Calldata, refundRecipient: _request.refundRecipient - }), - "tf" - ); + }) + ) { + revert TransactionNotAllowed(); + } } // Enforcing that `_request.l2GasPerPubdataByteLimit` equals to a certain constant number. This is needed @@ -257,7 +442,9 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { // VERY IMPORTANT: nobody should rely on this constant to be fixed and every contract should give their users the ability to provide the // ability to provide `_request.l2GasPerPubdataByteLimit` for each independent transaction. // CHANGING THIS CONSTANT SHOULD BE A CLIENT-SIDE CHANGE. 
- require(_request.l2GasPerPubdataByteLimit == REQUIRED_L2_GAS_PRICE_PER_PUBDATA, "qp"); + if (_request.l2GasPerPubdataByteLimit != REQUIRED_L2_GAS_PRICE_PER_PUBDATA) { + revert GasPerPubdataMismatch(); + } WritePriorityOpParams memory params; params.request = _request; @@ -268,17 +455,22 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { function _requestL2Transaction(WritePriorityOpParams memory _params) internal returns (bytes32 canonicalTxHash) { BridgehubL2TransactionRequest memory request = _params.request; - require(request.factoryDeps.length <= MAX_NEW_FACTORY_DEPS, "uj"); - _params.txId = s.priorityQueue.getTotalPriorityTxs(); + if (request.factoryDeps.length > MAX_NEW_FACTORY_DEPS) { + revert TooManyFactoryDeps(); + } + _params.txId = _nextPriorityTxId(); // Checking that the user provided enough ether to pay for the transaction. _params.l2GasPrice = _deriveL2GasPrice(tx.gasprice, request.l2GasPerPubdataByteLimit); uint256 baseCost = _params.l2GasPrice * request.l2GasLimit; - require(request.mintValue >= baseCost + request.l2Value, "mv"); // The `msg.value` doesn't cover the transaction cost + if (request.mintValue < baseCost + request.l2Value) { + revert MsgValueTooLow(baseCost + request.l2Value, request.mintValue); + } request.refundRecipient = AddressAliasHelper.actualRefundRecipient(request.refundRecipient, request.sender); // Change the sender address if it is a smart contract to prevent address collision between L1 and L2. - // Please note, currently zkSync address derivation is different from Ethereum one, but it may be changed in the future. + // Please note, currently ZKsync address derivation is different from Ethereum one, but it may be changed in the future. 
+ // solhint-disable avoid-tx-origin // slither-disable-next-line tx-origin if (request.sender != tx.origin) { request.sender = AddressAliasHelper.applyL1ToL2Alias(request.sender); @@ -287,7 +479,41 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { // populate missing fields _params.expirationTimestamp = uint64(block.timestamp + PRIORITY_EXPIRATION); // Safe to cast - canonicalTxHash = _writePriorityOp(_params); + L2CanonicalTransaction memory transaction; + (transaction, canonicalTxHash) = _validateTx(_params); + + _writePriorityOp(transaction, _params.request.factoryDeps, canonicalTxHash, _params.expirationTimestamp); + if (s.settlementLayer != address(0)) { + // slither-disable-next-line unused-return + IMailbox(s.settlementLayer).requestL2TransactionToGatewayMailbox({ + _chainId: s.chainId, + _canonicalTxHash: canonicalTxHash, + _expirationTimestamp: _params.expirationTimestamp + }); + } + } + + function _nextPriorityTxId() internal view returns (uint256) { + if (s.priorityQueue.getFirstUnprocessedPriorityTx() >= s.priorityTree.startIndex) { + return s.priorityTree.getTotalPriorityTxs(); + } else { + return s.priorityQueue.getTotalPriorityTxs(); + } + } + + function _requestL2TransactionToGatewayFree( + BridgehubL2TransactionRequest memory _request + ) internal nonReentrant returns (bytes32 canonicalTxHash) { + WritePriorityOpParams memory params = WritePriorityOpParams({ + request: _request, + txId: _nextPriorityTxId(), + l2GasPrice: 0, + expirationTimestamp: uint64(block.timestamp + PRIORITY_EXPIRATION) + }); + + L2CanonicalTransaction memory transaction; + (transaction, canonicalTxHash) = _validateTx(params); + _writePriorityOp(transaction, params.request.factoryDeps, canonicalTxHash, params.expirationTimestamp); } function _serializeL2Transaction( @@ -315,40 +541,45 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { }); } - /// @notice Stores a transaction record in storage & send event about that - function _writePriorityOp( + function 
_validateTx( WritePriorityOpParams memory _priorityOpParams - ) internal returns (bytes32 canonicalTxHash) { - L2CanonicalTransaction memory transaction = _serializeL2Transaction(_priorityOpParams); - + ) internal view returns (L2CanonicalTransaction memory transaction, bytes32 canonicalTxHash) { + transaction = _serializeL2Transaction(_priorityOpParams); bytes memory transactionEncoding = abi.encode(transaction); - TransactionValidator.validateL1ToL2Transaction( transaction, transactionEncoding, s.priorityTxMaxGasLimit, s.feeParams.priorityTxMaxPubdata ); - canonicalTxHash = keccak256(transactionEncoding); + } - s.priorityQueue.pushBack( - PriorityOperation({ - canonicalTxHash: canonicalTxHash, - expirationTimestamp: _priorityOpParams.expirationTimestamp, - layer2Tip: uint192(0) // TODO: Restore after fee modeling will be stable. (SMA-1230) - }) - ); + /// @notice Stores a transaction record in storage & send event about that + function _writePriorityOp( + L2CanonicalTransaction memory _transaction, + bytes[] memory _factoryDeps, + bytes32 _canonicalTxHash, + uint64 _expirationTimestamp + ) internal { + _writePriorityOpHash(_canonicalTxHash, _expirationTimestamp); // Data that is needed for the operator to simulate priority queue offchain // solhint-disable-next-line func-named-parameters - emit NewPriorityRequest( - _priorityOpParams.txId, - canonicalTxHash, - _priorityOpParams.expirationTimestamp, - transaction, - _priorityOpParams.request.factoryDeps - ); + emit NewPriorityRequest(_transaction.nonce, _canonicalTxHash, _expirationTimestamp, _transaction, _factoryDeps); + } + + function _writePriorityOpHash(bytes32 _canonicalTxHash, uint64 _expirationTimestamp) internal { + if (s.priorityTree.startIndex > s.priorityQueue.getFirstUnprocessedPriorityTx()) { + s.priorityQueue.pushBack( + PriorityOperation({ + canonicalTxHash: _canonicalTxHash, + expirationTimestamp: _expirationTimestamp, + layer2Tip: uint192(0) // TODO: Restore after fee modeling will be stable. 
(SMA-1230) + }) + ); + } + s.priorityTree.push(_canonicalTxHash); } /// @notice Hashes the L2 bytecodes and returns them in the format in which they are processed by the bootloader @@ -364,4 +595,64 @@ contract MailboxFacet is ZkSyncHyperchainBase, IMailbox { } } } + + /////////////////////////////////////////////////////// + //////// Legacy Era functions + + /// @inheritdoc IMailbox + function finalizeEthWithdrawal( + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes calldata _message, + bytes32[] calldata _merkleProof + ) external nonReentrant onlyL1 { + if (s.chainId != ERA_CHAIN_ID) { + revert OnlyEraSupported(); + } + address sharedBridge = IBridgehub(s.bridgehub).sharedBridge(); + IL1AssetRouter(sharedBridge).finalizeWithdrawal({ + _chainId: ERA_CHAIN_ID, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _message: _message, + _merkleProof: _merkleProof + }); + } + + /// @inheritdoc IMailbox + function requestL2Transaction( + address _contractL2, + uint256 _l2Value, + bytes calldata _calldata, + uint256 _l2GasLimit, + uint256 _l2GasPerPubdataByteLimit, + bytes[] calldata _factoryDeps, + address _refundRecipient + ) external payable onlyL1 returns (bytes32 canonicalTxHash) { + if (s.chainId != ERA_CHAIN_ID) { + revert OnlyEraSupported(); + } + canonicalTxHash = _requestL2TransactionSender( + BridgehubL2TransactionRequest({ + sender: msg.sender, + contractL2: _contractL2, + mintValue: msg.value, + l2Value: _l2Value, + l2GasLimit: _l2GasLimit, + l2Calldata: _calldata, + l2GasPerPubdataByteLimit: _l2GasPerPubdataByteLimit, + factoryDeps: _factoryDeps, + refundRecipient: _refundRecipient + }) + ); + address sharedBridge = IBridgehub(s.bridgehub).sharedBridge(); + IL1AssetRouter(sharedBridge).bridgehubDepositBaseToken{value: msg.value}( + s.chainId, + s.baseTokenAssetId, + msg.sender, + msg.value + ); + } } diff --git 
a/l1-contracts/contracts/state-transition/chain-deps/facets/ZKChainBase.sol b/l1-contracts/contracts/state-transition/chain-deps/facets/ZKChainBase.sol new file mode 100644 index 000000000..6c8a08657 --- /dev/null +++ b/l1-contracts/contracts/state-transition/chain-deps/facets/ZKChainBase.sol @@ -0,0 +1,72 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {ZKChainStorage} from "../ZKChainStorage.sol"; +import {ReentrancyGuard} from "../../../common/ReentrancyGuard.sol"; +import {PriorityQueue} from "../../libraries/PriorityQueue.sol"; +import {PriorityTree} from "../../libraries/PriorityTree.sol"; +import {Unauthorized} from "../../../common/L1ContractErrors.sol"; + +/// @title Base contract containing functions accessible to the other facets. +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +contract ZKChainBase is ReentrancyGuard { + using PriorityQueue for PriorityQueue.Queue; + using PriorityTree for PriorityTree.Tree; + + // slither-disable-next-line uninitialized-state + ZKChainStorage internal s; + + /// @notice Checks that the message sender is an active admin + modifier onlyAdmin() { + if (msg.sender != s.admin) { + revert Unauthorized(msg.sender); + } + _; + } + + /// @notice Checks if validator is active + modifier onlyValidator() { + if (!s.validators[msg.sender]) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyChainTypeManager() { + if (msg.sender != s.chainTypeManager) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyBridgehub() { + if (msg.sender != s.bridgehub) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyAdminOrChainTypeManager() { + if (msg.sender != s.admin && msg.sender != s.chainTypeManager) { + revert Unauthorized(msg.sender); + } + _; + } + + modifier onlyValidatorOrChainTypeManager() { + if (!s.validators[msg.sender] && msg.sender != s.chainTypeManager) { + revert Unauthorized(msg.sender); + } + _; + } + + function 
_getTotalPriorityTxs() internal view returns (uint256) { + if (s.priorityQueue.getFirstUnprocessedPriorityTx() >= s.priorityTree.startIndex) { + return s.priorityTree.getTotalPriorityTxs(); + } else { + return s.priorityQueue.getTotalPriorityTxs(); + } + } +} diff --git a/l1-contracts/contracts/state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol b/l1-contracts/contracts/state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol deleted file mode 100644 index 59662d409..000000000 --- a/l1-contracts/contracts/state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol +++ /dev/null @@ -1,57 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {ZkSyncHyperchainStorage} from "../ZkSyncHyperchainStorage.sol"; -import {ReentrancyGuard} from "../../../common/ReentrancyGuard.sol"; - -/// @title Base contract containing functions accessible to the other facets. -/// @author Matter Labs -/// @custom:security-contact security@matterlabs.dev -contract ZkSyncHyperchainBase is ReentrancyGuard { - // slither-disable-next-line uninitialized-state - ZkSyncHyperchainStorage internal s; - - /// @notice Checks that the message sender is an active admin - modifier onlyAdmin() { - require(msg.sender == s.admin, "Hyperchain: not admin"); - _; - } - - /// @notice Checks if validator is active - modifier onlyValidator() { - require(s.validators[msg.sender], "Hyperchain: not validator"); - _; - } - - modifier onlyStateTransitionManager() { - require(msg.sender == s.stateTransitionManager, "Hyperchain: not state transition manager"); - _; - } - - modifier onlyBridgehub() { - require(msg.sender == s.bridgehub, "Hyperchain: not bridgehub"); - _; - } - - modifier onlyAdminOrStateTransitionManager() { - require( - msg.sender == s.admin || msg.sender == s.stateTransitionManager, - "Hyperchain: Only by admin or state transition manager" - ); - _; - } - - modifier onlyValidatorOrStateTransitionManager() { - require( - s.validators[msg.sender] || msg.sender == 
s.stateTransitionManager, - "Hyperchain: Only by validator or state transition manager" - ); - _; - } - - modifier onlyBaseTokenBridge() { - require(msg.sender == s.baseTokenBridge, "Hyperchain: Only base token bridge can call this function"); - _; - } -} diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IAdmin.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IAdmin.sol index f2252fee6..daf155e3a 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IAdmin.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IAdmin.sol @@ -1,16 +1,17 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {IZkSyncHyperchainBase} from "../chain-interfaces/IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "../chain-interfaces/IZKChainBase.sol"; import {Diamond} from "../libraries/Diamond.sol"; -import {FeeParams, PubdataPricingMode} from "../chain-deps/ZkSyncHyperchainStorage.sol"; +import {FeeParams, PubdataPricingMode} from "../chain-deps/ZKChainStorage.sol"; +import {ZKChainCommitment} from "../../common/Config.sol"; /// @title The interface of the Admin Contract that controls access rights for contract management. /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -interface IAdmin is IZkSyncHyperchainBase { +interface IAdmin is IZKChainBase { /// @notice Starts the transfer of admin rights. Only the current admin can propose a new pending one. /// @notice New admin can accept admin rights by calling `acceptAdmin` function. 
/// @param _newPendingAdmin Address of the new admin @@ -61,9 +62,24 @@ interface IAdmin is IZkSyncHyperchainBase { function freezeDiamond() external; /// @notice Unpause the functionality of all freezable facets & their selectors - /// @dev Both the admin and the STM can unfreeze Diamond Proxy + /// @dev Both the admin and the CTM can unfreeze Diamond Proxy function unfreezeDiamond() external; + function genesisUpgrade( + address _l1GenesisUpgrade, + address _ctmDeployer, + bytes calldata _forceDeploymentData, + bytes[] calldata _factoryDeps + ) external; + + /// @notice Set the L1 DA validator address as well as the L2 DA validator address. + /// @dev While in principle it is possible that updating only one of the addresses is needed, + /// usually these should work in pair and L1 validator typically expects a specific input from the L2 Validator. + /// That's why we change those together to prevent admins of chains from shooting themselves in the foot. + /// @param _l1DAValidator The address of the L1 DA validator + /// @param _l2DAValidator The address of the L2 DA validator + function setDAValidatorPair(address _l1DAValidator, address _l2DAValidator) external; + /// @notice Porter availability status changes event IsPorterAvailableStatusUpdate(bool isPorterAvailable); @@ -100,9 +116,40 @@ interface IAdmin is IZkSyncHyperchainBase { /// @notice Emitted when an upgrade is executed. event ExecuteUpgrade(Diamond.DiamondCutData diamondCut); + /// @notice Emitted when the migration to the new settlement layer is complete. + event MigrationComplete(); + /// @notice Emitted when the contract is frozen. event Freeze(); /// @notice Emitted when the contract is unfrozen. 
event Unfreeze(); + + /// @notice New pair of DA validators set + event NewL2DAValidator(address indexed oldL2DAValidator, address indexed newL2DAValidator); + event NewL1DAValidator(address indexed oldL1DAValidator, address indexed newL1DAValidator); + + event BridgeInitialize(address indexed l1Token, string name, string symbol, uint8 decimals); + + event BridgeMint(address indexed _account, uint256 _amount); + + /// @dev Similar to IL1AssetHandler interface, used to send chains. + function forwardedBridgeBurn( + address _settlementLayer, + address _originalCaller, + bytes calldata _data + ) external payable returns (bytes memory _bridgeMintData); + + /// @dev Similar to IL1AssetHandler interface, used to claim failed chain transfers. + function forwardedBridgeRecoverFailedTransfer( + uint256 _chainId, + bytes32 _assetInfo, + address _originalCaller, + bytes calldata _chainData + ) external payable; + + /// @dev Similar to IL1AssetHandler interface, used to receive chains. + function forwardedBridgeMint(bytes calldata _data, bool _contractAlreadyDeployed) external payable; + + function prepareChainCommitment() external view returns (ZKChainCommitment memory commitment); } diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IDiamondInit.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IDiamondInit.sol index eaa61c3e5..e8dbe1030 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IDiamondInit.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IDiamondInit.sol @@ -1,37 +1,42 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; import {IVerifier, VerifierParams} from "./IVerifier.sol"; -import {FeeParams} from "../chain-deps/ZkSyncHyperchainStorage.sol"; +import {FeeParams} from "../chain-deps/ZKChainStorage.sol"; /// @param chainId the id of the chain /// @param bridgehub the address of the bridgehub contract -/// @param stateTransitionManager contract's address +/// @param chainTypeManager contract's address /// @param protocolVersion initial protocol version /// @param validatorTimelock address of the validator timelock that delays execution /// @param admin address who can manage the contract -/// @param baseToken address of the base token of the chain -/// @param baseTokenBridge address of the L1 shared bridge contract +/// @param baseTokenAssetId asset id of the base token of the chain /// @param storedBatchZero hash of the initial genesis batch -/// @param verifier address of Verifier contract +/// @param dualVerifier address of Wrapper Verifier contract +/// @param plonkVerifier address of PLONK Verifier contract +/// @param fflonkVerifier address of FFLONK Verifier contract +/// @param fflonkProofLength length of the FFLONK proof type /// @param verifierParams Verifier config parameters that describes the circuit to be verified /// @param l2BootloaderBytecodeHash The hash of bootloader L2 bytecode /// @param l2DefaultAccountBytecodeHash The hash of default account L2 bytecode /// @param priorityTxMaxGasLimit maximum number of the L2 gas that a user can request for L1 -> L2 transactions /// @param feeParams Fee parameters to be used for L1->L2 transactions /// @param blobVersionedHashRetriever Address of contract used to pull the blob versioned hash for a transaction. 
+// solhint-disable-next-line gas-struct-packing struct InitializeData { uint256 chainId; address bridgehub; - address stateTransitionManager; + address chainTypeManager; uint256 protocolVersion; address admin; address validatorTimelock; - address baseToken; - address baseTokenBridge; + bytes32 baseTokenAssetId; bytes32 storedBatchZero; - IVerifier verifier; + IVerifier dualVerifier; + address plonkVerifier; + address fflonkVerifier; + uint256 fflonkProofLength; VerifierParams verifierParams; bytes32 l2BootloaderBytecodeHash; bytes32 l2DefaultAccountBytecodeHash; @@ -40,7 +45,10 @@ struct InitializeData { address blobVersionedHashRetriever; } -/// @param verifier address of Verifier contract +/// @param dualVerifier address of Wrapper Verifier contract +/// @param plonkVerifier address of PLONK Verifier contract +/// @param fflonkVerifier address of FFLONK Verifier contract +/// @param fflonkProofLength length of the FFLONK proof type /// @param verifierParams Verifier config parameters that describes the circuit to be verified /// @param l2BootloaderBytecodeHash The hash of bootloader L2 bytecode /// @param l2DefaultAccountBytecodeHash The hash of default account L2 bytecode @@ -48,7 +56,10 @@ struct InitializeData { /// @param feeParams Fee parameters to be used for L1->L2 transactions /// @param blobVersionedHashRetriever Address of contract used to pull the blob versioned hash for a transaction. 
struct InitializeDataNewChain { - IVerifier verifier; + IVerifier dualVerifier; + address plonkVerifier; + address fflonkVerifier; + uint256 fflonkProofLength; VerifierParams verifierParams; bytes32 l2BootloaderBytecodeHash; bytes32 l2DefaultAccountBytecodeHash; diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IExecutor.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IExecutor.sol index 43a7485c7..0877dcbf9 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IExecutor.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IExecutor.sol @@ -1,33 +1,21 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {IZkSyncHyperchainBase} from "./IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "./IZKChainBase.sol"; /// @dev Enum used by L2 System Contracts to differentiate logs. enum SystemLogKey { L2_TO_L1_LOGS_TREE_ROOT_KEY, - TOTAL_L2_TO_L1_PUBDATA_KEY, - STATE_DIFF_HASH_KEY, PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, PREV_BATCH_HASH_KEY, CHAINED_PRIORITY_TXN_HASH_KEY, NUMBER_OF_LAYER_1_TXS_KEY, - BLOB_ONE_HASH_KEY, - BLOB_TWO_HASH_KEY, - BLOB_THREE_HASH_KEY, - BLOB_FOUR_HASH_KEY, - BLOB_FIVE_HASH_KEY, - BLOB_SIX_HASH_KEY, + L2_DA_VALIDATOR_OUTPUT_HASH_KEY, + USED_L2_DA_VALIDATOR_ADDRESS_KEY, EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY } -/// @dev Enum used to determine the source of pubdata. At first we will support calldata and blobs but this can be extended. 
-enum PubdataSource { - Calldata, - Blob -} - struct LogProcessingOutput { uint256 numberOfLayer1Txs; bytes32 chainedPriorityTxsHash; @@ -36,14 +24,9 @@ struct LogProcessingOutput { bytes32 stateDiffHash; bytes32 l2LogsTreeRoot; uint256 packedBatchAndL2BlockTimestamp; - bytes32[] blobHashes; + bytes32 l2DAValidatorOutputHash; } -/// @dev Total number of bytes in a blob. Blob = 4096 field elements * 31 bytes per field element -/// @dev EIP-4844 defines it as 131_072 but we use 4096 * 31 within our circuits to always fit within a field element -/// @dev Our circuits will prove that a EIP-4844 blob and our internal blob are the same. -uint256 constant BLOB_SIZE_BYTES = 126_976; - /// @dev Offset used to pull Address From Log. Equal to 4 (bytes for isService) uint256 constant L2_LOG_ADDRESS_OFFSET = 4; @@ -53,20 +36,6 @@ uint256 constant L2_LOG_KEY_OFFSET = 24; /// @dev Offset used to pull Value From Log. Equal to 4 (bytes for isService) + 20 (bytes for address) + 32 (bytes for key) uint256 constant L2_LOG_VALUE_OFFSET = 56; -/// @dev BLS Modulus value defined in EIP-4844 and the magic value returned from a successful call to the -/// point evaluation precompile -uint256 constant BLS_MODULUS = 52435875175126190479447740508185965837690552500527637822603658699938581184513; - -/// @dev Packed pubdata commitments. 
-/// @dev Format: list of: opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes)) = 144 bytes -uint256 constant PUBDATA_COMMITMENT_SIZE = 144; - -/// @dev Offset in pubdata commitment of blobs for claimed value -uint256 constant PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET = 16; - -/// @dev Offset in pubdata commitment of blobs for kzg commitment -uint256 constant PUBDATA_COMMITMENT_COMMITMENT_OFFSET = 48; - /// @dev Max number of blobs currently supported uint256 constant MAX_NUMBER_OF_BLOBS = 6; @@ -75,10 +44,10 @@ uint256 constant MAX_NUMBER_OF_BLOBS = 6; /// than the maximal number of blobs supported by the contract (`MAX_NUMBER_OF_BLOBS`). uint256 constant TOTAL_BLOBS_IN_COMMITMENT = 16; -/// @title The interface of the zkSync Executor contract capable of processing events emitted in the zkSync protocol. +/// @title The interface of the ZKsync Executor contract capable of processing events emitted in the ZKsync protocol. /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -interface IExecutor is IZkSyncHyperchainBase { +interface IExecutor is IZKChainBase { /// @notice Rollup batch stored data /// @param batchNumber Rollup batch number /// @param batchHash Hash of L2 batch @@ -87,7 +56,8 @@ interface IExecutor is IZkSyncHyperchainBase { /// @param priorityOperationsHash Hash of all priority operations from this batch /// @param l2LogsTreeRoot Root hash of tree that contains L2 -> L1 messages from this batch /// @param timestamp Rollup batch timestamp, have the same format as Ethereum batch constant - /// @param commitment Verified input for the zkSync circuit + /// @param commitment Verified input for the ZKsync circuit + // solhint-disable-next-line gas-struct-packing struct StoredBatchInfo { uint64 batchNumber; bytes32 batchHash; @@ -109,7 +79,7 @@ interface IExecutor is IZkSyncHyperchainBase { /// @param bootloaderHeapInitialContentsHash Hash of the initial contents of the bootloader heap. 
In practice it serves as the commitment to the transactions in the batch. /// @param eventsQueueStateHash Hash of the events queue state. In practice it serves as the commitment to the events in the batch. /// @param systemLogs concatenation of all L2 -> L1 system logs in the batch - /// @param pubdataCommitments Packed pubdata commitments/data. + /// @param operatorDAInput Packed pubdata commitments/data. /// @dev pubdataCommitments format: This will always start with a 1 byte pubdataSource flag. Current allowed values are 0 (calldata) or 1 (blobs) /// kzg: list of: opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes) = 144 bytes /// calldata: pubdataCommitments.length - 1 - 32 bytes of pubdata @@ -127,74 +97,62 @@ interface IExecutor is IZkSyncHyperchainBase { bytes32 bootloaderHeapInitialContentsHash; bytes32 eventsQueueStateHash; bytes systemLogs; - bytes pubdataCommitments; - } - - /// @notice Recursive proof input data (individual commitments are constructed onchain) - struct ProofInput { - uint256[] recursiveAggregationInput; - uint256[] serializedProof; + bytes operatorDAInput; } /// @notice Function called by the operator to commit new batches. It is responsible for: /// - Verifying the correctness of their timestamps. /// - Processing their L2->L1 logs. /// - Storing batch commitments. - /// @param _lastCommittedBatchData Stored data of the last committed batch. - /// @param _newBatchesData Data of the new batches to be committed. - function commitBatches( - StoredBatchInfo calldata _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData - ) external; - - /// @notice same as `commitBatches` but with the chainId so ValidatorTimelock can sort the inputs. + /// @param _chainId Chain ID of the chain. + /// @param _processFrom The batch number from which the processing starts. + /// @param _processTo The batch number at which the processing ends. 
+ /// @param _commitData The encoded data of the new batches to be committed. function commitBatchesSharedBridge( uint256 _chainId, - StoredBatchInfo calldata _lastCommittedBatchData, - CommitBatchInfo[] calldata _newBatchesData + uint256 _processFrom, + uint256 _processTo, + bytes calldata _commitData ) external; /// @notice Batches commitment verification. /// @dev Only verifies batch commitments without any other processing. - /// @param _prevBatch Stored data of the last committed batch. - /// @param _committedBatches Stored data of the committed batches. - /// @param _proof The zero knowledge proof. - function proveBatches( - StoredBatchInfo calldata _prevBatch, - StoredBatchInfo[] calldata _committedBatches, - ProofInput calldata _proof - ) external; - - /// @notice same as `proveBatches` but with the chainId so ValidatorTimelock can sort the inputs. + /// @param _chainId Chain ID of the chain. + /// @param _processBatchFrom The batch number from which the verification starts. + /// @param _processBatchTo The batch number at which the verification ends. + /// @param _proofData The encoded data of the new batches to be verified. function proveBatchesSharedBridge( uint256 _chainId, - StoredBatchInfo calldata _prevBatch, - StoredBatchInfo[] calldata _committedBatches, - ProofInput calldata _proof + uint256 _processBatchFrom, + uint256 _processBatchTo, + bytes calldata _proofData ) external; /// @notice The function called by the operator to finalize (execute) batches. It is responsible for: /// - Processing all pending operations (commpleting priority requests). /// - Finalizing this batch (i.e. allowing to withdraw funds from the system) - /// @param _batchesData Data of the batches to be executed. - function executeBatches(StoredBatchInfo[] calldata _batchesData) external; - - /// @notice same as `executeBatches` but with the chainId so ValidatorTimelock can sort the inputs. 
- function executeBatchesSharedBridge(uint256 _chainId, StoredBatchInfo[] calldata _batchesData) external; + /// @param _chainId Chain ID of the chain. + /// @param _processFrom The batch number from which the execution starts. + /// @param _processTo The batch number at which the execution ends. + /// @param _executeData The encoded data of the new batches to be executed. + function executeBatchesSharedBridge( + uint256 _chainId, + uint256 _processFrom, + uint256 _processTo, + bytes calldata _executeData + ) external; /// @notice Reverts unexecuted batches + /// @param _chainId Chain ID of the chain /// @param _newLastBatch batch number after which batches should be reverted /// NOTE: Doesn't delete the stored data about batches, but only decreases /// counters that are responsible for the number of batches - function revertBatches(uint256 _newLastBatch) external; - - /// @notice same as `revertBatches` but with the chainId so ValidatorTimelock can sort the inputs. function revertBatchesSharedBridge(uint256 _chainId, uint256 _newLastBatch) external; /// @notice Event emitted when a batch is committed /// @param batchNumber Number of the batch committed /// @param batchHash Hash of the L2 batch - /// @param commitment Calculated input for the zkSync circuit + /// @param commitment Calculated input for the ZKsync circuit /// @dev It has the name "BlockCommit" and not "BatchCommit" due to backward compatibility considerations event BlockCommit(uint256 indexed batchNumber, bytes32 indexed batchHash, bytes32 indexed commitment); @@ -207,7 +165,7 @@ interface IExecutor is IZkSyncHyperchainBase { /// @notice Event emitted when a batch is executed /// @param batchNumber Number of the batch executed /// @param batchHash Hash of the L2 batch - /// @param commitment Verified input for the zkSync circuit + /// @param commitment Verified input for the ZKsync circuit /// @dev It has the name "BlockExecution" and not "BatchExecution" due to backward compatibility considerations 
event BlockExecution(uint256 indexed batchNumber, bytes32 indexed batchHash, bytes32 indexed commitment); diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IGetters.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IGetters.sol index 9d77efdf3..4de3a0067 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IGetters.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IGetters.sol @@ -1,22 +1,30 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {PriorityOperation} from "../libraries/PriorityQueue.sol"; import {VerifierParams} from "../chain-interfaces/IVerifier.sol"; -import {PubdataPricingMode} from "../chain-deps/ZkSyncHyperchainStorage.sol"; -import {IZkSyncHyperchainBase} from "./IZkSyncHyperchainBase.sol"; +import {PubdataPricingMode} from "../chain-deps/ZKChainStorage.sol"; +import {IZKChainBase} from "./IZKChainBase.sol"; /// @title The interface of the Getters Contract that implements functions for getting contract state from outside the blockchain. /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -interface IGetters is IZkSyncHyperchainBase { +/// @dev Most of the methods simply return the values that correspond to the current diamond proxy and possibly +/// not to the ZK Chain as a whole. For example, if the chain is migrated to another settlement layer, the values returned +/// by this facet will correspond to the values stored on this chain and possibly not the canonical state of the chain. 
+interface IGetters is IZKChainBase { /*////////////////////////////////////////////////////////////// CUSTOM GETTERS //////////////////////////////////////////////////////////////*/ - /// @return The address of the verifier smart contract - function getVerifier() external view returns (address); + /// @return The address of the Wrapper verifier smart contract + function getDualVerifier() external view returns (address); + /// @return The address of the PLONK verifier smart contract + function getPlonkVerifier() external view returns (address); + /// @return The address of the FFLONK verifier smart contract + function getFflonkVerifier() external view returns (address); + /// @return The length of the FFLONK proof type + function getFflonkProofLength() external view returns (uint256); /// @return The address of the current admin function getAdmin() external view returns (address); @@ -28,13 +36,16 @@ interface IGetters is IZkSyncHyperchainBase { function getBridgehub() external view returns (address); /// @return The address of the state transition - function getStateTransitionManager() external view returns (address); + function getChainTypeManager() external view returns (address); + + /// @return The chain ID + function getChainId() external view returns (uint256); /// @return The address of the base token function getBaseToken() external view returns (address); - /// @return The address of the base token bridge - function getBaseTokenBridge() external view returns (address); + /// @return The address of the base token + function getBaseTokenAssetId() external view returns (bytes32); /// @return The total number of batches that were committed function getTotalBatchesCommitted() external view returns (uint256); @@ -45,9 +56,15 @@ interface IGetters is IZkSyncHyperchainBase { /// @return The total number of batches that were committed & verified & executed function getTotalBatchesExecuted() external view returns (uint256); + // @return Address of transaction 
filterer + function getTransactionFilterer() external view returns (address); + /// @return The total number of priority operations that were added to the priority queue, including all processed ones function getTotalPriorityTxs() external view returns (uint256); + /// @return The root hash of the priority tree + function getPriorityTreeRoot() external view returns (bytes32); + /// @notice The function that returns the first unprocessed priority transaction. /// @dev Returns zero if and only if no operations were processed from the queue. /// @dev If all the transactions were processed, it will return the last processed index, so @@ -58,9 +75,6 @@ interface IGetters is IZkSyncHyperchainBase { /// @return The number of priority operations currently in the queue function getPriorityQueueSize() external view returns (uint256); - /// @return The first unprocessed priority operation from the queue - function priorityQueueFrontOperation() external view returns (PriorityOperation memory); - /// @return Whether the address has a validator access function isValidator(address _address) external view returns (bool); @@ -147,4 +161,7 @@ interface IGetters is IZkSyncHyperchainBase { /// @return isFreezable Whether the facet can be frozen by the admin or always accessible function isFacetFreezable(address _facet) external view returns (bool isFreezable); + + /// @return The address of the current settlement layer. + function getSettlementLayer() external view returns (address); } diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IL1DAValidator.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IL1DAValidator.sol new file mode 100644 index 000000000..a4fe56b01 --- /dev/null +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IL1DAValidator.sol @@ -0,0 +1,41 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +/// @dev Enum used to determine the source of pubdata. 
At first we will support calldata and blobs but this can be extended. +enum PubdataSource { + Calldata, + Blob +} + +struct L1DAValidatorOutput { + /// @dev The hash of the uncompressed state diff. + bytes32 stateDiffHash; + /// @dev The hashes of the blobs on L1. The array is dynamic to account for forward compatibility. + /// The length of it must be equal to `maxBlobsSupported`. + bytes32[] blobsLinearHashes; + /// @dev The commitments to the blobs on L1. The array is dynamic to account for forward compatibility. + /// Its length must be equal to the length of blobsLinearHashes. + /// @dev If the system supports more blobs than returned, the rest of the array should be filled with zeros. + bytes32[] blobsOpeningCommitments; +} + +interface IL1DAValidator { + /// @notice The function that checks the data availability for the given batch input. + /// @param _chainId The chain id of the chain that is being committed. + /// @param _batchNumber The batch number for which the data availability is being checked. + /// @param _l2DAValidatorOutputHash The hash of that was returned by the l2DAValidator. + /// @param _operatorDAInput The DA input by the operator provided on L1. + /// @param _maxBlobsSupported The maximal number of blobs supported by the chain. + /// We provide this value for future compatibility. + /// This is needed because the corresponding `blobsLinearHashes`/`blobsOpeningCommitments` + /// in the `L1DAValidatorOutput` struct will have to have this length as it is required + /// to be static by the circuits. 
+ function checkDA( + uint256 _chainId, + uint256 _batchNumber, + bytes32 _l2DAValidatorOutputHash, + bytes calldata _operatorDAInput, + uint256 _maxBlobsSupported + ) external returns (L1DAValidatorOutput memory output); +} diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/ILegacyGetters.sol b/l1-contracts/contracts/state-transition/chain-interfaces/ILegacyGetters.sol index 5d3c36094..9c143d93e 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/ILegacyGetters.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/ILegacyGetters.sol @@ -1,14 +1,14 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {IZkSyncHyperchainBase} from "./IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "./IZKChainBase.sol"; /// @author Matter Labs -/// @dev This interface contains getters for the zkSync contract that should not be used, +/// @dev This interface contains getters for the ZKsync contract that should not be used, /// but still are kept for backward compatibility. 
/// @custom:security-contact security@matterlabs.dev -interface ILegacyGetters is IZkSyncHyperchainBase { +interface ILegacyGetters is IZKChainBase { /// @return The total number of batches that were committed /// @dev It is a *deprecated* method, please use `getTotalBatchesCommitted` instead function getTotalBlocksCommitted() external view returns (uint256); diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IMailbox.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IMailbox.sol index b0b535d68..e63832aa7 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IMailbox.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IMailbox.sol @@ -1,14 +1,14 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {IZkSyncHyperchainBase} from "./IZkSyncHyperchainBase.sol"; +import {IZKChainBase} from "./IZKChainBase.sol"; import {L2CanonicalTransaction, L2Log, L2Message, TxStatus, BridgehubL2TransactionRequest} from "../../common/Messaging.sol"; -/// @title The interface of the zkSync Mailbox contract that provides interfaces for L1 <-> L2 interaction. +/// @title The interface of the ZKsync Mailbox contract that provides interfaces for L1 <-> L2 interaction. 
/// @author Matter Labs /// @custom:security-contact security@matterlabs.dev -interface IMailbox is IZkSyncHyperchainBase { +interface IMailbox is IZKChainBase { /// @notice Prove that a specific arbitrary-length message was sent in a specific L2 batch number /// @param _batchNumber The executed L2 batch number in which the message appeared /// @param _index The position in the L2 logs Merkle tree of the l2Log that was sent with the message @@ -95,10 +95,24 @@ interface IMailbox is IZkSyncHyperchainBase { address _refundRecipient ) external payable returns (bytes32 canonicalTxHash); + /// @notice when requesting transactions through the bridgehub function bridgehubRequestL2Transaction( BridgehubL2TransactionRequest calldata _request ) external returns (bytes32 canonicalTxHash); + /// @dev On the Gateway the chain's mailbox receives the tx from the bridgehub. + function bridgehubRequestL2TransactionOnGateway(bytes32 _canonicalTxHash, uint64 _expirationTimestamp) external; + + /// @dev On L1 we have to forward to the Gateway's mailbox which sends to the Bridgehub on the Gw + /// @param _chainId the chainId of the chain + /// @param _canonicalTxHash the canonical transaction hash + /// @param _expirationTimestamp the expiration timestamp + function requestL2TransactionToGatewayMailbox( + uint256 _chainId, + bytes32 _canonicalTxHash, + uint64 _expirationTimestamp + ) external returns (bytes32 canonicalTxHash); + /// @notice Estimates the cost in Ether of requesting execution of an L2 transaction from L1 /// @param _gasPrice expected L1 gas price at which the user requests the transaction execution /// @param _l2GasLimit Maximum amount of L2 gas that transaction can consume during execution on L2 @@ -110,8 +124,33 @@ interface IMailbox is IZkSyncHyperchainBase { uint256 _l2GasPerPubdataByteLimit ) external view returns (uint256); + /// Proves that a certain leaf was included as part of the log merkle tree. 
+ function proveL2LeafInclusion( + uint256 _batchNumber, + uint256 _batchRootMask, + bytes32 _leaf, + bytes32[] calldata _proof + ) external view returns (bool); + /// @notice transfer Eth to shared bridge as part of migration process - function transferEthToSharedBridge() external; + // function transferEthToSharedBridge() external; + + // function relayTxSL( + // address _to, + // L2CanonicalTransaction memory _transaction, + // bytes[] memory _factoryDeps, + // bytes32 _canonicalTxHash, + // uint64 _expirationTimestamp + // ) external; + + // function freeAcceptTx( + // L2CanonicalTransaction memory _transaction, + // bytes[] memory _factoryDeps, + // bytes32 _canonicalTxHash, + // uint64 _expirationTimestamp + // ) external; + + // function acceptFreeRequestFromBridgehub(BridgehubL2TransactionRequest calldata _request) external; /// @notice New priority request event. Emitted when a request is placed into the priority queue /// @param txId Serial number of the priority operation @@ -127,4 +166,13 @@ interface IMailbox is IZkSyncHyperchainBase { L2CanonicalTransaction transaction, bytes[] factoryDeps ); + + /// @notice New relayed priority request event. It is emitted on a chain that is deployed + /// on top of the gateway when it receives a request relayed via the Bridgehub. + /// @dev IMPORTANT: this event most likely will be removed in the future, so + /// no one should rely on it for indexing purposes. 
+ /// @param txId Serial number of the priority operation + /// @param txHash keccak256 hash of encoded transaction representation + /// @param expirationTimestamp Timestamp up to which priority request should be processed + event NewRelayedPriorityTransaction(uint256 txId, bytes32 txHash, uint64 expirationTimestamp); } diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/ITransactionFilterer.sol b/l1-contracts/contracts/state-transition/chain-interfaces/ITransactionFilterer.sol index 9e1178fc9..a3776cacd 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/ITransactionFilterer.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/ITransactionFilterer.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /// @title The interface of the L1 -> L2 transaction filterer. /// @author Matter Labs diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IVerifier.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IVerifier.sol index 0577102b1..fe5e2af2c 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IVerifier.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IVerifier.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /// @notice Part of the configuration parameters of ZKP circuits struct VerifierParams { @@ -16,11 +16,7 @@ interface IVerifier { /// @dev Verifies a zk-SNARK proof. /// @return A boolean value indicating whether the zk-SNARK proof is valid. /// Note: The function may revert execution instead of returning false in some cases. 
- function verify( - uint256[] calldata _publicInputs, - uint256[] calldata _proof, - uint256[] calldata _recursiveAggregationInput - ) external view returns (bool); + function verify(uint256[] calldata _publicInputs, uint256[] calldata _proof) external view returns (bool); /// @notice Calculates a keccak256 hash of the runtime loaded verification keys. /// @return vkHash The keccak256 hash of the loaded verification keys. diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IZkSyncHyperchain.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IZKChain.sol similarity index 64% rename from l1-contracts/contracts/state-transition/chain-interfaces/IZkSyncHyperchain.sol rename to l1-contracts/contracts/state-transition/chain-interfaces/IZKChain.sol index 6641985a8..31d14009b 100644 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IZkSyncHyperchain.sol +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IZKChain.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; import {IAdmin} from "./IAdmin.sol"; import {IExecutor} from "./IExecutor.sol"; @@ -9,7 +9,7 @@ import {IMailbox} from "./IMailbox.sol"; import {Diamond} from "../libraries/Diamond.sol"; -interface IZkSyncHyperchain is IAdmin, IExecutor, IGetters, IMailbox { +interface IZKChain is IAdmin, IExecutor, IGetters, IMailbox { // We need this structure for the server for now event ProposeTransparentUpgrade( Diamond.DiamondCutData diamondCut, diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IZKChainBase.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IZKChainBase.sol new file mode 100644 index 000000000..06f0c9784 --- /dev/null +++ b/l1-contracts/contracts/state-transition/chain-interfaces/IZKChainBase.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; + +/// @title The interface of the ZKsync contract, responsible for the main ZKsync logic. +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IZKChainBase { + /// @return Returns facet name. + function getName() external view returns (string memory); +} diff --git a/l1-contracts/contracts/state-transition/chain-interfaces/IZkSyncHyperchainBase.sol b/l1-contracts/contracts/state-transition/chain-interfaces/IZkSyncHyperchainBase.sol deleted file mode 100644 index 1b6629f7d..000000000 --- a/l1-contracts/contracts/state-transition/chain-interfaces/IZkSyncHyperchainBase.sol +++ /dev/null @@ -1,10 +0,0 @@ -// SPDX-License-Identifier: UNLICENSED -pragma solidity 0.8.24; - -/// @title The interface of the zkSync contract, responsible for the main zkSync logic. -/// @author Matter Labs -/// @custom:security-contact security@matterlabs.dev -interface IZkSyncHyperchainBase { - /// @return Returns facet name. 
- function getName() external view returns (string memory); -} diff --git a/l1-contracts/contracts/state-transition/data-availability/CalldataDA.sol b/l1-contracts/contracts/state-transition/data-availability/CalldataDA.sol new file mode 100644 index 000000000..f1e5f7355 --- /dev/null +++ b/l1-contracts/contracts/state-transition/data-availability/CalldataDA.sol @@ -0,0 +1,111 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-custom-errors, reason-string + +/// @dev Total number of bytes in a blob. Blob = 4096 field elements * 31 bytes per field element +/// @dev EIP-4844 defines it as 131_072 but we use 4096 * 31 within our circuits to always fit within a field element +/// @dev Our circuits will prove that an EIP-4844 blob and our internal blob are the same. +uint256 constant BLOB_SIZE_BYTES = 126_976; +
+/// @dev The state diff hash, hash of pubdata + the number of blobs. +uint256 constant BLOB_DATA_OFFSET = 65; + +/// @dev The size of the commitment for a single blob. +uint256 constant BLOB_COMMITMENT_SIZE = 32; + +/// @notice Contract that contains the functionality for processing the calldata DA. +/// @dev The expected l2DAValidator that should be used with it is `RollupL2DAValidator`. +abstract contract CalldataDA { + /// @notice Parses the input that the L2 DA validator has provided to the contract. + /// @param _l2DAValidatorOutputHash The hash of the output of the L2 DA validator. + /// @param _maxBlobsSupported The maximal number of blobs supported by the chain. + /// @param _operatorDAInput The DA input by the operator provided on L1. 
+ function _processL2RollupDAValidatorOutputHash( + bytes32 _l2DAValidatorOutputHash, + uint256 _maxBlobsSupported, + bytes calldata _operatorDAInput + ) + internal + pure + returns ( + bytes32 stateDiffHash, + bytes32 fullPubdataHash, + bytes32[] memory blobsLinearHashes, + uint256 blobsProvided, + bytes calldata l1DaInput + ) + { + // The preimage under the hash `_l2DAValidatorOutputHash` is expected to be in the following format: + // - First 32 bytes are the hash of the uncompressed state diff. + // - Then, there is a 32-byte hash of the full pubdata. + // - Then, there is the 1-byte number of blobs published. + // - Then, there are linear hashes of the published blobs, 32 bytes each. + + // Check that it accommodates enough pubdata for the state diff hash, hash of pubdata + the number of blobs. + require(_operatorDAInput.length >= BLOB_DATA_OFFSET, "too small"); + + stateDiffHash = bytes32(_operatorDAInput[:32]); + fullPubdataHash = bytes32(_operatorDAInput[32:64]); + blobsProvided = uint256(uint8(_operatorDAInput[64])); + + require(blobsProvided <= _maxBlobsSupported, "invalid number of blobs"); + + // Note that the API of the contract requires that the returned blobs linear hashes have length of + // the `_maxBlobsSupported` + blobsLinearHashes = new bytes32[](_maxBlobsSupported); + + require(_operatorDAInput.length >= BLOB_DATA_OFFSET + 32 * blobsProvided, "invalid blobs hashes"); + + _cloneCalldata(blobsLinearHashes, _operatorDAInput[BLOB_DATA_OFFSET:], blobsProvided); + + uint256 ptr = BLOB_DATA_OFFSET + 32 * blobsProvided; + + // Now, we need to double check that the provided input was indeed returned by the L2 DA validator. + require(keccak256(_operatorDAInput[:ptr]) == _l2DAValidatorOutputHash, "invalid l2 DA output hash"); + + // The rest of the output was provided specifically by the operator + l1DaInput = _operatorDAInput[ptr:]; + } + + /// @notice Verify that the calldata DA was correctly provided. 
+ /// @param _blobsProvided The number of blobs provided. + /// @param _fullPubdataHash Hash of the pubdata preimage. + /// @param _maxBlobsSupported Maximum number of blobs supported. + /// @param _pubdataInput Full pubdata + an additional 32 bytes containing the blob commitment for the pubdata. + /// @dev We supply the blob commitment as part of the pubdata because even with calldata the prover will check these values. + function _processCalldataDA( + uint256 _blobsProvided, + bytes32 _fullPubdataHash, + uint256 _maxBlobsSupported, + bytes calldata _pubdataInput + ) internal pure virtual returns (bytes32[] memory blobCommitments, bytes calldata _pubdata) { + require(_blobsProvided == 1, "only one blob with calldata"); + require(_pubdataInput.length >= BLOB_COMMITMENT_SIZE, "pubdata too small"); + + // We typically do not know whether we'll use calldata or blobs at the time when + // we start proving the batch. That's why the blob commitment for a single blob is still present in the case of calldata. + + blobCommitments = new bytes32[](_maxBlobsSupported); + + _pubdata = _pubdataInput[:_pubdataInput.length - BLOB_COMMITMENT_SIZE]; + + require(_pubdata.length <= BLOB_SIZE_BYTES, "cz"); + require(_fullPubdataHash == keccak256(_pubdata), "wp"); + blobCommitments[0] = bytes32(_pubdataInput[_pubdataInput.length - BLOB_COMMITMENT_SIZE:_pubdataInput.length]); + } + + /// @notice Method that clones a slice of calldata into a bytes32[] memory array. + /// @param _dst The destination array. + /// @param _input The input calldata. + /// @param _len The length of the slice in 32-byte words to clone. + function _cloneCalldata(bytes32[] memory _dst, bytes calldata _input, uint256 _len) internal pure { + assembly { + // The pointer to the allocated memory above. We skip 32 bytes to avoid overwriting the length. 
+ let dstPtr := add(_dst, 0x20) + let inputPtr := _input.offset + calldatacopy(dstPtr, inputPtr, mul(_len, 32)) + } + } +} diff --git a/l1-contracts/contracts/state-transition/data-availability/CalldataDAGateway.sol b/l1-contracts/contracts/state-transition/data-availability/CalldataDAGateway.sol new file mode 100644 index 000000000..3e069bbd5 --- /dev/null +++ b/l1-contracts/contracts/state-transition/data-availability/CalldataDAGateway.sol @@ -0,0 +1,36 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {CalldataDA, BLOB_COMMITMENT_SIZE, BLOB_SIZE_BYTES} from "./CalldataDA.sol"; + +// solhint-disable gas-custom-errors, reason-string + +/// @notice Contract that contains the functionality for processing the calldata DA. +/// @dev The expected l2DAValidator that should be used with it is `RollupL2DAValidator`. +abstract contract CalldataDAGateway is CalldataDA { + /// @inheritdoc CalldataDA + function _processCalldataDA( + uint256 _blobsProvided, + bytes32 _fullPubdataHash, + uint256 _maxBlobsSupported, + bytes calldata _pubdataInput + ) internal pure override returns (bytes32[] memory blobCommitments, bytes calldata _pubdata) { + require(_pubdataInput.length >= _blobsProvided * BLOB_COMMITMENT_SIZE, "pubdata too small"); + + // We typically do not know whether we'll use calldata or blobs at the time when + // we start proving the batch. That's why the blob commitment for a single blob is still present in the case of calldata. 
+ blobCommitments = new bytes32[](_maxBlobsSupported); + + _pubdata = _pubdataInput[:_pubdataInput.length - _blobsProvided * BLOB_COMMITMENT_SIZE]; + + require(_pubdata.length <= _blobsProvided * BLOB_SIZE_BYTES, "cz"); + require(_fullPubdataHash == keccak256(_pubdata), "wp"); + + bytes calldata providedCommitments = _pubdataInput[_pubdataInput.length - + _blobsProvided * + BLOB_COMMITMENT_SIZE:]; + + _cloneCalldata(blobCommitments, providedCommitments, _blobsProvided); + } +} diff --git a/l1-contracts/contracts/state-transition/data-availability/RelayedSLDAValidator.sol b/l1-contracts/contracts/state-transition/data-availability/RelayedSLDAValidator.sol new file mode 100644 index 000000000..d5f3473a7 --- /dev/null +++ b/l1-contracts/contracts/state-transition/data-availability/RelayedSLDAValidator.sol @@ -0,0 +1,101 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-custom-errors, reason-string + +import {IL1DAValidator, L1DAValidatorOutput, PubdataSource} from "../chain-interfaces/IL1DAValidator.sol"; +import {IL1Messenger} from "../../common/interfaces/IL1Messenger.sol"; + +import {CalldataDAGateway} from "./CalldataDAGateway.sol"; + +import {IBridgehub} from "../../bridgehub/IBridgehub.sol"; +import {L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, L2_BRIDGEHUB_ADDR} from "../../common/L2ContractAddresses.sol"; + +/// @notice The DA validator intended to be used in Era-environment. +/// @dev For compatibility reasons it accepts calldata in the same format as the `RollupL1DAValidator`, but unlike the latter it +/// does not support blobs. +/// @dev Note that it does not provide any compression whatsoever. +contract RelayedSLDAValidator is IL1DAValidator, CalldataDAGateway { + /// @dev Ensures that the sender is the chain that is supposed to send the message. + /// @param _chainId The chain id of the chain that is supposed to send the message. 
+ function _ensureOnlyChainSender(uint256 _chainId) internal view { + // Note that this contract is only supposed to be deployed on L2, where the + // bridgehub is predeployed at `L2_BRIDGEHUB_ADDR` address. + require(IBridgehub(L2_BRIDGEHUB_ADDR).getZKChain(_chainId) == msg.sender, "l1-da-validator/invalid-sender"); + } + + /// @dev Relays the calldata to L1. + /// @param _chainId The chain id of the chain that is supposed to send the message. + /// @param _batchNumber The batch number for which the data availability is being checked. + /// @param _pubdata The pubdata to be relayed to L1. + function _relayCalldata(uint256 _chainId, uint256 _batchNumber, bytes calldata _pubdata) internal { + // Re-sending all the pubdata in pure form to L1. + // slither-disable-next-line unused-return + IL1Messenger(L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR).sendToL1(abi.encode(_chainId, _batchNumber, _pubdata)); + } + + /// @inheritdoc IL1DAValidator + function checkDA( + uint256 _chainId, + uint256 _batchNumber, + bytes32 _l2DAValidatorOutputHash, + bytes calldata _operatorDAInput, + uint256 _maxBlobsSupported + ) external returns (L1DAValidatorOutput memory output) { + // Unfortunately we have to use a method call instead of a modifier + // because of the stack-too-deep error caused by it. + _ensureOnlyChainSender(_chainId); + + // Preventing "stack too deep" error + uint256 blobsProvided; + bytes32 fullPubdataHash; + bytes calldata l1DaInput; + { + bytes32 stateDiffHash; + bytes32[] memory blobsLinearHashes; + ( + stateDiffHash, + fullPubdataHash, + blobsLinearHashes, + blobsProvided, + l1DaInput + ) = _processL2RollupDAValidatorOutputHash(_l2DAValidatorOutputHash, _maxBlobsSupported, _operatorDAInput); + + output.stateDiffHash = stateDiffHash; + output.blobsLinearHashes = blobsLinearHashes; + } + + uint8 pubdataSource = uint8(l1DaInput[0]); + + // Note, that the blobs are not supported in the RelayedSLDAValidator. 
+ if (pubdataSource == uint8(PubdataSource.Calldata)) { + bytes calldata pubdata; + bytes32[] memory blobCommitments; + + (blobCommitments, pubdata) = _processCalldataDA( + blobsProvided, + fullPubdataHash, + _maxBlobsSupported, + l1DaInput[1:] + ); + + _relayCalldata(_chainId, _batchNumber, pubdata); + + output.blobsOpeningCommitments = blobCommitments; + } else { + revert("l1-da-validator/invalid-pubdata-source"); + } + + // We verify that for each set of blobHash/blobCommitment are either both empty + // or there are values for both. + // This is mostly a sanity check and it is not strictly required. + for (uint256 i = 0; i < _maxBlobsSupported; ++i) { + require( + (output.blobsLinearHashes[i] == bytes32(0) && output.blobsOpeningCommitments[i] == bytes32(0)) || + (output.blobsLinearHashes[i] != bytes32(0) && output.blobsOpeningCommitments[i] != bytes32(0)), + "bh" + ); + } + } +} diff --git a/l1-contracts/contracts/state-transition/data-availability/ValidiumL1DAValidator.sol b/l1-contracts/contracts/state-transition/data-availability/ValidiumL1DAValidator.sol new file mode 100644 index 000000000..15825bbdd --- /dev/null +++ b/l1-contracts/contracts/state-transition/data-availability/ValidiumL1DAValidator.sol @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +// solhint-disable gas-custom-errors, reason-string + +import {IL1DAValidator, L1DAValidatorOutput} from "../chain-interfaces/IL1DAValidator.sol"; + +contract ValidiumL1DAValidator is IL1DAValidator { + function checkDA( + uint256, // _chainId + uint256, // _batchNumber + bytes32, // _l2DAValidatorOutputHash + bytes calldata _operatorDAInput, + uint256 maxBlobsSupported + ) external override returns (L1DAValidatorOutput memory output) { + // For Validiums, we expect the operator to just provide the data for us. + // We don't need to do any checks with regard to the l2DAValidatorOutputHash. 
+ require(_operatorDAInput.length == 32, "ValL1DA wrong input length"); + + bytes32 stateDiffHash = abi.decode(_operatorDAInput, (bytes32)); + + // The rest of the fields that relate to blobs are empty. + output.stateDiffHash = stateDiffHash; + + output.blobsLinearHashes = new bytes32[](maxBlobsSupported); + output.blobsOpeningCommitments = new bytes32[](maxBlobsSupported); + } +} diff --git a/l1-contracts/contracts/state-transition/l2-deps/IComplexUpgrader.sol b/l1-contracts/contracts/state-transition/l2-deps/IComplexUpgrader.sol new file mode 100644 index 000000000..f07b879e7 --- /dev/null +++ b/l1-contracts/contracts/state-transition/l2-deps/IComplexUpgrader.sol @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IComplexUpgrader { + function upgrade(address _delegateTo, bytes calldata _calldata) external payable; +} diff --git a/l1-contracts/contracts/state-transition/l2-deps/IL2GatewayUpgrade.sol b/l1-contracts/contracts/state-transition/l2-deps/IL2GatewayUpgrade.sol new file mode 100644 index 000000000..fdafe2807 --- /dev/null +++ b/l1-contracts/contracts/state-transition/l2-deps/IL2GatewayUpgrade.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {IL2ContractDeployer} from "../../common/interfaces/IL2ContractDeployer.sol"; + +interface IL2GatewayUpgrade { + function upgrade( + IL2ContractDeployer.ForceDeployment[] calldata _forceDeployments, + address _ctmDeployer, + bytes calldata _fixedForceDeploymentsData, + bytes calldata _additionalForceDeploymentsData + ) external payable; +} diff --git a/l1-contracts/contracts/state-transition/l2-deps/IL2GenesisUpgrade.sol b/l1-contracts/contracts/state-transition/l2-deps/IL2GenesisUpgrade.sol new file mode 100644 index 000000000..6221f2e18 --- /dev/null +++ b/l1-contracts/contracts/state-transition/l2-deps/IL2GenesisUpgrade.sol @@ -0,0 +1,35 @@ +// 
SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +/// @notice A struct that describes a forced deployment on an address +struct ForceDeployment { + // The bytecode hash to put on an address + bytes32 bytecodeHash; + // The address on which to deploy the bytecodehash to + address newAddress; + // Whether to run the constructor on the force deployment + bool callConstructor; + // The value with which to initialize a contract + uint256 value; + // The constructor calldata + bytes input; +} + +struct ZKChainSpecificForceDeploymentsData { + bytes32 baseTokenAssetId; + address l2LegacySharedBridge; + address l2Weth; +} + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IL2GenesisUpgrade { + event UpgradeComplete(uint256 _chainId); + + function genesisUpgrade( + uint256 _chainId, + address _ctmDeployer, + bytes calldata _fixedForceDeploymentsData, + bytes calldata _additionalForceDeploymentsData + ) external payable; +} diff --git a/l1-contracts/contracts/state-transition/l2-deps/ISystemContext.sol b/l1-contracts/contracts/state-transition/l2-deps/ISystemContext.sol index fded0f4d2..8448cb4e4 100644 --- a/l1-contracts/contracts/state-transition/l2-deps/ISystemContext.sol +++ b/l1-contracts/contracts/state-transition/l2-deps/ISystemContext.sol @@ -1,6 +1,9 @@ -// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.0; +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev interface ISystemContext { function setChainId(uint256 _newChainId) external; } diff --git a/l1-contracts/contracts/state-transition/libraries/BatchDecoder.sol b/l1-contracts/contracts/state-transition/libraries/BatchDecoder.sol new file mode 100644 index 000000000..a8af4b7ab --- /dev/null +++ b/l1-contracts/contracts/state-transition/libraries/BatchDecoder.sol @@ -0,0 +1,215 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; + +import {IExecutor} from "../chain-interfaces/IExecutor.sol"; +import {PriorityOpsBatchInfo} from "./PriorityTree.sol"; +import {IncorrectBatchBounds, EmptyData, UnsupportedCommitBatchEncoding, UnsupportedProofBatchEncoding, UnsupportedExecuteBatchEncoding} from "../../common/L1ContractErrors.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice Utility library for decoding and validating batch data. +/// @dev This library decodes commit, proof, and execution batch data and verifies batch number bounds. +/// It reverts with custom errors when the data is invalid or unsupported encoding is used. +library BatchDecoder { + /// @notice The currently supported encoding version. + uint8 internal constant SUPPORTED_ENCODING_VERSION = 0; + + /// @notice Decodes commit data from a calldata bytes into the last committed batch data and an array of new batch data. + /// @param _commitData The calldata byte array containing the data for committing batches. + /// @return lastCommittedBatchData The data for the batch before newly committed batches. + /// @return newBatchesData An array containing the newly committed batches. 
+ function _decodeCommitData( + bytes calldata _commitData + ) + private + pure + returns ( + IExecutor.StoredBatchInfo memory lastCommittedBatchData, + IExecutor.CommitBatchInfo[] memory newBatchesData + ) + { + if (_commitData.length == 0) { + revert EmptyData(); + } + + uint8 encodingVersion = uint8(_commitData[0]); + if (encodingVersion == SUPPORTED_ENCODING_VERSION) { + (lastCommittedBatchData, newBatchesData) = abi.decode( + _commitData[1:], + (IExecutor.StoredBatchInfo, IExecutor.CommitBatchInfo[]) + ); + } else { + revert UnsupportedCommitBatchEncoding(encodingVersion); + } + } + + /// @notice Decodes the commit data and checks that the provided batch bounds are correct. + /// @dev Note that it only checks that the last and the first batches in the array correspond to the provided bounds. + /// The fact that the batches inside the array are provided in the correct order should be checked by the caller. + /// @param _commitData The calldata byte array containing the data for committing batches. + /// @param _processBatchFrom The expected batch number of the first commit batch in the array. + /// @param _processBatchTo The expected batch number of the last commit batch in the array. + /// @return lastCommittedBatchData The data for the batch before newly committed batches. + /// @return newBatchesData An array containing the newly committed batches. 
+ function decodeAndCheckCommitData( + bytes calldata _commitData, + uint256 _processBatchFrom, + uint256 _processBatchTo + ) + internal + pure + returns ( + IExecutor.StoredBatchInfo memory lastCommittedBatchData, + IExecutor.CommitBatchInfo[] memory newBatchesData + ) + { + (lastCommittedBatchData, newBatchesData) = _decodeCommitData(_commitData); + + if (newBatchesData.length == 0) { + revert EmptyData(); + } + + if ( + newBatchesData[0].batchNumber != _processBatchFrom || + newBatchesData[newBatchesData.length - 1].batchNumber != _processBatchTo + ) { + revert IncorrectBatchBounds( + _processBatchFrom, + _processBatchTo, + newBatchesData[0].batchNumber, + newBatchesData[newBatchesData.length - 1].batchNumber + ); + } + } + + /// @notice Decodes proof data from a calldata byte array into the previous batch, an array of proved batches, and a proof array. + /// @param _proofData The calldata byte array containing the data for proving batches. + /// @return prevBatch The batch information before the batches to be verified. + /// @return provedBatches An array containing the batches to be verified. + /// @return proof An array containing the proof for the verifier. + function _decodeProofData( + bytes calldata _proofData + ) + private + pure + returns ( + IExecutor.StoredBatchInfo memory prevBatch, + IExecutor.StoredBatchInfo[] memory provedBatches, + uint256[] memory proof + ) + { + uint8 encodingVersion = uint8(_proofData[0]); + if (encodingVersion == SUPPORTED_ENCODING_VERSION) { + (prevBatch, provedBatches, proof) = abi.decode( + _proofData[1:], + (IExecutor.StoredBatchInfo, IExecutor.StoredBatchInfo[], uint256[]) + ); + } else { + revert UnsupportedProofBatchEncoding(encodingVersion); + } + } + + /// @notice Decodes the proof data and checks that the provided batch bounds are correct. + /// @dev Note that it only checks that the last and the first batches in the array correspond to the provided bounds. 
+ /// The fact that the batches inside the array are provided in the correct order should be checked by the caller. + /// @param _proofData The proof data to decode. + /// @param _processBatchFrom The expected batch number of the first batch in the array. + /// @param _processBatchTo The expected batch number of the last batch in the array. + /// @return prevBatch The batch information before the batches to be verified. + /// @return provedBatches An array containing the batches to be verified. + /// @return proof An array containing the proof for the verifier. + function decodeAndCheckProofData( + bytes calldata _proofData, + uint256 _processBatchFrom, + uint256 _processBatchTo + ) + internal + pure + returns ( + IExecutor.StoredBatchInfo memory prevBatch, + IExecutor.StoredBatchInfo[] memory provedBatches, + uint256[] memory proof + ) + { + (prevBatch, provedBatches, proof) = _decodeProofData(_proofData); + + if (provedBatches.length == 0) { + revert EmptyData(); + } + + if ( + provedBatches[0].batchNumber != _processBatchFrom || + provedBatches[provedBatches.length - 1].batchNumber != _processBatchTo + ) { + revert IncorrectBatchBounds( + _processBatchFrom, + _processBatchTo, + provedBatches[0].batchNumber, + provedBatches[provedBatches.length - 1].batchNumber + ); + } + } + + /// @notice Decodes execution data from a calldata byte array into an array of stored batch information. + /// @param _executeData The calldata byte array containing the execution data to decode. + /// @return executeData An array containing the stored batch information for execution. + /// @return priorityOpsData Merkle proofs of the priority operations for each batch. 
+ function _decodeExecuteData( + bytes calldata _executeData + ) + private + pure + returns (IExecutor.StoredBatchInfo[] memory executeData, PriorityOpsBatchInfo[] memory priorityOpsData) + { + if (_executeData.length == 0) { + revert EmptyData(); + } + + uint8 encodingVersion = uint8(_executeData[0]); + if (encodingVersion == 0) { + (executeData, priorityOpsData) = abi.decode( + _executeData[1:], + (IExecutor.StoredBatchInfo[], PriorityOpsBatchInfo[]) + ); + } else { + revert UnsupportedExecuteBatchEncoding(encodingVersion); + } + } + + /// @notice Decodes the execute data and checks that the provided batch bounds are correct. + /// @dev Note that it only checks that the last and the first batches in the array correspond to the provided bounds. + /// The fact that the batches inside the array are provided in the correct order should be checked by the caller. + /// @param _executeData The calldata byte array containing the execution data to decode. + /// @param _processBatchFrom The expected batch number of the first batch in the array. + /// @param _processBatchTo The expected batch number of the last batch in the array. + /// @return executeData An array containing the stored batch information for execution. + /// @return priorityOpsData Merkle proofs of the priority operations for each batch. 
+ function decodeAndCheckExecuteData( + bytes calldata _executeData, + uint256 _processBatchFrom, + uint256 _processBatchTo + ) + internal + pure + returns (IExecutor.StoredBatchInfo[] memory executeData, PriorityOpsBatchInfo[] memory priorityOpsData) + { + (executeData, priorityOpsData) = _decodeExecuteData(_executeData); + + if (executeData.length == 0) { + revert EmptyData(); + } + + if ( + executeData[0].batchNumber != _processBatchFrom || + executeData[executeData.length - 1].batchNumber != _processBatchTo + ) { + revert IncorrectBatchBounds( + _processBatchFrom, + _processBatchTo, + executeData[0].batchNumber, + executeData[executeData.length - 1].batchNumber + ); + } + } +} diff --git a/l1-contracts/contracts/state-transition/libraries/Diamond.sol b/l1-contracts/contracts/state-transition/libraries/Diamond.sol index 4e44375da..b43a673c5 100644 --- a/l1-contracts/contracts/state-transition/libraries/Diamond.sol +++ b/l1-contracts/contracts/state-transition/libraries/Diamond.sol @@ -1,9 +1,10 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {SafeCast} from "@openzeppelin/contracts/utils/math/SafeCast.sol"; +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; import {UncheckedMath} from "../../common/libraries/UncheckedMath.sol"; +import {NoFunctionsForDiamondCut, UndefinedDiamondCutAction, AddressHasNoCode, FacetExists, RemoveFunctionFacetAddressZero, SelectorsMustAllHaveSameFreezability, NonEmptyCalldata, ReplaceFunctionFacetAddressZero, RemoveFunctionFacetAddressNotZero, DelegateCallFailed} from "../../common/L1ContractErrors.sol"; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev @@ -59,6 +60,7 @@ library Diamond { /// @param action The action that is made on the facet /// @param isFreezable Denotes whether the facet & all their selectors can be frozen /// @param selectors An array of unique selectors that belongs to the facet address + // solhint-disable-next-line gas-struct-packing struct FacetCut { address facet; Action action; @@ -104,7 +106,9 @@ library Diamond { bool isFacetFreezable = facetCuts[i].isFreezable; bytes4[] memory selectors = facetCuts[i].selectors; - require(selectors.length > 0, "B"); // no functions for diamond cut + if (selectors.length == 0) { + revert NoFunctionsForDiamondCut(); + } if (action == Action.Add) { _addFunctions(facet, selectors, isFacetFreezable); @@ -113,7 +117,7 @@ library Diamond { } else if (action == Action.Remove) { _removeFunctions(facet, selectors); } else { - revert("C"); // undefined diamond cut action + revert UndefinedDiamondCutAction(); } } @@ -129,7 +133,9 @@ library Diamond { // Facet with no code cannot be added. // This check also verifies that the facet does not have zero address, since it is the // address with which 0x00000000 selector is associated. 
- require(_facet.code.length > 0, "G"); + if (_facet.code.length == 0) { + revert AddressHasNoCode(_facet); + } // Add facet to the list of facets if the facet address is new one _saveFacetIfNew(_facet); @@ -138,7 +144,9 @@ library Diamond { for (uint256 i = 0; i < selectorsLength; i = i.uncheckedInc()) { bytes4 selector = _selectors[i]; SelectorToFacet memory oldFacet = ds.selectorToFacet[selector]; - require(oldFacet.facetAddress == address(0), "J"); // facet for this selector already exists + if (oldFacet.facetAddress != address(0)) { + revert FacetExists(selector, oldFacet.facetAddress); + } _addOneFunction(_facet, selector, _isFacetFreezable); } @@ -152,13 +160,18 @@ library Diamond { // Facet with no code cannot be added. // This check also verifies that the facet does not have zero address, since it is the // address with which 0x00000000 selector is associated. - require(_facet.code.length > 0, "K"); + if (_facet.code.length == 0) { + revert AddressHasNoCode(_facet); + } uint256 selectorsLength = _selectors.length; for (uint256 i = 0; i < selectorsLength; i = i.uncheckedInc()) { bytes4 selector = _selectors[i]; SelectorToFacet memory oldFacet = ds.selectorToFacet[selector]; - require(oldFacet.facetAddress != address(0), "L"); // it is impossible to replace the facet with zero address + // it is impossible to replace the facet with zero address + if (oldFacet.facetAddress == address(0)) { + revert ReplaceFunctionFacetAddressZero(); + } _removeOneFunction(oldFacet.facetAddress, selector); // Add facet to the list of facets if the facet address is a new one @@ -172,13 +185,19 @@ library Diamond { function _removeFunctions(address _facet, bytes4[] memory _selectors) private { DiamondStorage storage ds = getDiamondStorage(); - require(_facet == address(0), "a1"); // facet address must be zero + // facet address must be zero + if (_facet != address(0)) { + revert RemoveFunctionFacetAddressNotZero(_facet); + } uint256 selectorsLength = _selectors.length; for 
(uint256 i = 0; i < selectorsLength; i = i.uncheckedInc()) { bytes4 selector = _selectors[i]; SelectorToFacet memory oldFacet = ds.selectorToFacet[selector]; - require(oldFacet.facetAddress != address(0), "a2"); // Can't delete a non-existent facet + // Can't delete a non-existent facet + if (oldFacet.facetAddress == address(0)) { + revert RemoveFunctionFacetAddressZero(); + } _removeOneFunction(oldFacet.facetAddress, selector); } @@ -212,7 +231,9 @@ library Diamond { // so all the selectors in a facet will have the same freezability if (selectorPosition != 0) { bytes4 selector0 = ds.facetToSelectors[_facet].selectors[0]; - require(_isSelectorFreezable == ds.selectorToFacet[selector0].isFreezable, "J1"); + if (_isSelectorFreezable != ds.selectorToFacet[selector0].isFreezable) { + revert SelectorsMustAllHaveSameFreezability(); + } } ds.selectorToFacet[_selector] = SelectorToFacet({ @@ -277,14 +298,18 @@ library Diamond { /// @dev Used as a final step of diamond cut to execute the logic of the initialization for changed facets function _initializeDiamondCut(address _init, bytes memory _calldata) private { if (_init == address(0)) { - require(_calldata.length == 0, "H"); // Non-empty calldata for zero address + // Non-empty calldata for zero address + if (_calldata.length != 0) { + revert NonEmptyCalldata(); + } } else { // Do not check whether `_init` is a contract since later we check that it returns data. (bool success, bytes memory data) = _init.delegatecall(_calldata); if (!success) { // If the returndata is too small, we still want to produce some meaningful error - if (data.length <= 4) { - revert("I"); // delegatecall failed + + if (data.length < 4) { + revert DelegateCallFailed(data); } assembly { @@ -294,8 +319,12 @@ library Diamond { // Check that called contract returns magic value to make sure that contract logic // supposed to be used as diamond cut initializer. 
- require(data.length == 32, "lp"); - require(abi.decode(data, (bytes32)) == DIAMOND_INIT_SUCCESS_RETURN_VALUE, "lp1"); + if (data.length != 32) { + revert DelegateCallFailed(data); + } + if (abi.decode(data, (bytes32)) != DIAMOND_INIT_SUCCESS_RETURN_VALUE) { + revert DelegateCallFailed(data); + } } } } diff --git a/l1-contracts/contracts/state-transition/libraries/LibMap.sol b/l1-contracts/contracts/state-transition/libraries/LibMap.sol index 1ba8a82be..2cbad0b78 100644 --- a/l1-contracts/contracts/state-transition/libraries/LibMap.sol +++ b/l1-contracts/contracts/state-transition/libraries/LibMap.sol @@ -1,5 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; /// @notice Library for storage of packed unsigned integers. /// @author Matter Labs diff --git a/l1-contracts/contracts/state-transition/libraries/Merkle.sol b/l1-contracts/contracts/state-transition/libraries/Merkle.sol deleted file mode 100644 index ec31073aa..000000000 --- a/l1-contracts/contracts/state-transition/libraries/Merkle.sol +++ /dev/null @@ -1,47 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {UncheckedMath} from "../../common/libraries/UncheckedMath.sol"; - -/// @author Matter Labs -/// @custom:security-contact security@matterlabs.dev -library Merkle { - using UncheckedMath for uint256; - - /// @dev Calculate Merkle root by the provided Merkle proof. 
- /// NOTE: When using this function, check that the _path length is equal to the tree height to prevent shorter/longer paths attack - /// @param _path Merkle path from the leaf to the root - /// @param _index Leaf index in the tree - /// @param _itemHash Hash of leaf content - /// @return The Merkle root - function calculateRoot( - bytes32[] calldata _path, - uint256 _index, - bytes32 _itemHash - ) internal pure returns (bytes32) { - uint256 pathLength = _path.length; - require(pathLength > 0, "xc"); - require(pathLength < 256, "bt"); - require(_index < (1 << pathLength), "px"); - - bytes32 currentHash = _itemHash; - for (uint256 i; i < pathLength; i = i.uncheckedInc()) { - currentHash = (_index % 2 == 0) - ? _efficientHash(currentHash, _path[i]) - : _efficientHash(_path[i], currentHash); - _index /= 2; - } - - return currentHash; - } - - /// @dev Keccak hash of the concatenation of two 32-byte words - function _efficientHash(bytes32 _lhs, bytes32 _rhs) private pure returns (bytes32 result) { - assembly { - mstore(0x00, _lhs) - mstore(0x20, _rhs) - result := keccak256(0x00, 0x40) - } - } -} diff --git a/l1-contracts/contracts/state-transition/libraries/PriorityQueue.sol b/l1-contracts/contracts/state-transition/libraries/PriorityQueue.sol index cb43068df..141cd40c0 100644 --- a/l1-contracts/contracts/state-transition/libraries/PriorityQueue.sol +++ b/l1-contracts/contracts/state-transition/libraries/PriorityQueue.sol @@ -1,6 +1,8 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; -pragma solidity 0.8.24; +import {QueueIsEmpty} from "../../common/L1ContractErrors.sol"; /// @notice The structure that contains meta information of the L2 transaction that was requested from L1 /// @dev The weird size of fields was selected specifically to minimize the structure storage size @@ -62,7 +64,10 @@ library PriorityQueue { /// @return The first unprocessed priority operation from the queue function front(Queue storage _queue) internal view returns (PriorityOperation memory) { - require(!_queue.isEmpty(), "D"); // priority queue is empty + // priority queue is empty + if (_queue.isEmpty()) { + revert QueueIsEmpty(); + } return _queue.data[_queue.head]; } @@ -70,7 +75,10 @@ /// @notice Remove the first unprocessed priority operation from the queue /// @return priorityOperation that was popped from the priority queue function popFront(Queue storage _queue) internal returns (PriorityOperation memory priorityOperation) { - require(!_queue.isEmpty(), "s"); // priority queue is empty + // priority queue is empty + if (_queue.isEmpty()) { + revert QueueIsEmpty(); + } // Save value into the stack to avoid double reading from the storage uint256 head = _queue.head; diff --git a/l1-contracts/contracts/state-transition/libraries/PriorityTree.sol b/l1-contracts/contracts/state-transition/libraries/PriorityTree.sol new file mode 100644 index 000000000..71d6d9df1 --- /dev/null +++ b/l1-contracts/contracts/state-transition/libraries/PriorityTree.sol @@ -0,0 +1,121 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; + +// solhint-disable gas-custom-errors + +import {DynamicIncrementalMerkle} from "../../common/libraries/DynamicIncrementalMerkle.sol"; +import {Merkle} from "../../common/libraries/Merkle.sol"; +import {PriorityTreeCommitment} from "../../common/Config.sol"; + +struct PriorityOpsBatchInfo { + bytes32[] leftPath; + bytes32[] rightPath; + bytes32[] itemHashes; +} + +bytes32 constant ZERO_LEAF_HASH = keccak256(""); + +library PriorityTree { + using PriorityTree for Tree; + using DynamicIncrementalMerkle for DynamicIncrementalMerkle.Bytes32PushTree; + + struct Tree { + uint256 startIndex; // priority tree started accepting priority ops from this index + uint256 unprocessedIndex; // relative to `startIndex` + mapping(bytes32 => bool) historicalRoots; + DynamicIncrementalMerkle.Bytes32PushTree tree; + } + + /// @notice Returns zero if and only if no operations were processed from the queue + /// @return Index of the oldest priority operation that wasn't processed yet + function getFirstUnprocessedPriorityTx(Tree storage _tree) internal view returns (uint256) { + return _tree.startIndex + _tree.unprocessedIndex; + } + + /// @return The total number of priority operations that were added to the priority queue, including all processed ones + function getTotalPriorityTxs(Tree storage _tree) internal view returns (uint256) { + return _tree.startIndex + _tree.tree._nextLeafIndex; + } + + /// @return The total number of unprocessed priority operations in a priority queue + function getSize(Tree storage _tree) internal view returns (uint256) { + return _tree.tree._nextLeafIndex - _tree.unprocessedIndex; + } + + /// @notice Add the priority operation to the end of the priority queue + function push(Tree storage _tree, bytes32 _hash) internal { + (, bytes32 newRoot) = _tree.tree.push(_hash); + _tree.historicalRoots[newRoot] = true; + } + + /// @notice Set up the tree + function setup(Tree storage _tree, uint256 _startIndex) internal { + 
_tree.tree.setup(ZERO_LEAF_HASH); + _tree.startIndex = _startIndex; + } + + /// @return Returns the tree root. + function getRoot(Tree storage _tree) internal view returns (bytes32) { + return _tree.tree.root(); + } + + /// @param _root The root to check. + /// @return Returns true if the root is a historical root. + function isHistoricalRoot(Tree storage _tree, bytes32 _root) internal view returns (bool) { + return _tree.historicalRoots[_root]; + } + + /// @notice Process the priority operations of a batch. + function processBatch(Tree storage _tree, PriorityOpsBatchInfo memory _priorityOpsData) internal { + if (_priorityOpsData.itemHashes.length > 0) { + bytes32 expectedRoot = Merkle.calculateRootPaths( + _priorityOpsData.leftPath, + _priorityOpsData.rightPath, + _tree.unprocessedIndex, + _priorityOpsData.itemHashes + ); + require(_tree.historicalRoots[expectedRoot], "PT: root mismatch"); + _tree.unprocessedIndex += _priorityOpsData.itemHashes.length; + } + } + + /// @notice Initialize a chain from a commitment. + function initFromCommitment(Tree storage _tree, PriorityTreeCommitment memory _commitment) internal { + uint256 height = _commitment.sides.length; // Height, including the root node. + require(height > 0, "PT: invalid commitment"); + _tree.startIndex = _commitment.startIndex; + _tree.unprocessedIndex = _commitment.unprocessedIndex; + _tree.tree._nextLeafIndex = _commitment.nextLeafIndex; + _tree.tree._sides = _commitment.sides; + bytes32 zero = ZERO_LEAF_HASH; + _tree.tree._zeros = new bytes32[](height); + for (uint256 i; i < height; ++i) { + _tree.tree._zeros[i] = zero; + zero = Merkle.efficientHash(zero, zero); + } + _tree.historicalRoots[_tree.tree.root()] = true; + } + + /// @notice Reinitialize the tree from a commitment on L1. 
+ function checkL1Reinit(Tree storage _tree, PriorityTreeCommitment memory _commitment) internal view { + require(_tree.startIndex == _commitment.startIndex, "PT: invalid start index"); + require(_tree.unprocessedIndex >= _commitment.unprocessedIndex, "PT: invalid unprocessed index"); + require(_tree.tree._nextLeafIndex >= _commitment.nextLeafIndex, "PT: invalid next leaf index"); + } + + /// @notice Reinitialize the tree from a commitment on GW. + function checkGWReinit(Tree storage _tree, PriorityTreeCommitment memory _commitment) internal view { + require(_tree.startIndex == _commitment.startIndex, "PT: invalid start index"); + require(_tree.unprocessedIndex <= _commitment.unprocessedIndex, "PT: invalid unprocessed index"); + require(_tree.tree._nextLeafIndex <= _commitment.nextLeafIndex, "PT: invalid next leaf index"); + } + + /// @notice Returns the commitment to the priority tree. + function getCommitment(Tree storage _tree) internal view returns (PriorityTreeCommitment memory commitment) { + commitment.nextLeafIndex = _tree.tree._nextLeafIndex; + commitment.startIndex = _tree.startIndex; + commitment.unprocessedIndex = _tree.unprocessedIndex; + commitment.sides = _tree.tree._sides; + } +} diff --git a/l1-contracts/contracts/state-transition/libraries/TransactionValidator.sol b/l1-contracts/contracts/state-transition/libraries/TransactionValidator.sol index 71ef18a86..f196053f4 100644 --- a/l1-contracts/contracts/state-transition/libraries/TransactionValidator.sol +++ b/l1-contracts/contracts/state-transition/libraries/TransactionValidator.sol @@ -1,13 +1,14 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; -pragma solidity 0.8.24; - -import {Math} from "@openzeppelin/contracts/utils/math/Math.sol"; +import {Math} from "@openzeppelin/contracts-v4/utils/math/Math.sol"; import {L2CanonicalTransaction} from "../../common/Messaging.sol"; import {TX_SLOT_OVERHEAD_L2_GAS, MEMORY_OVERHEAD_GAS, L1_TX_INTRINSIC_L2_GAS, L1_TX_DELTA_544_ENCODING_BYTES, L1_TX_DELTA_FACTORY_DEPS_L2_GAS, L1_TX_MIN_L2_GAS_BASE, L1_TX_INTRINSIC_PUBDATA, L1_TX_DELTA_FACTORY_DEPS_PUBDATA} from "../../common/Config.sol"; +import {TooMuchGas, InvalidUpgradeTxn, UpgradeTxVerifyParam, PubdataGreaterThanLimit, ValidateTxnNotEnoughGas, TxnBodyGasLimitNotEnoughGas} from "../../common/L1ContractErrors.sol"; -/// @title zkSync Library for validating L1 -> L2 transactions +/// @title ZKsync Library for validating L1 -> L2 transactions /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev library TransactionValidator { @@ -25,39 +26,70 @@ library TransactionValidator { uint256 l2GasForTxBody = getTransactionBodyGasLimit(_transaction.gasLimit, _encoded.length); // Ensuring that the transaction is provable - require(l2GasForTxBody <= _priorityTxMaxGasLimit, "ui"); + if (l2GasForTxBody > _priorityTxMaxGasLimit) { + revert TooMuchGas(); + } // Ensuring that the transaction cannot output more pubdata than is processable - require(l2GasForTxBody / _transaction.gasPerPubdataByteLimit <= _priorityTxMaxPubdata, "uk"); + if (l2GasForTxBody / _transaction.gasPerPubdataByteLimit > _priorityTxMaxPubdata) { + revert PubdataGreaterThanLimit(_priorityTxMaxPubdata, l2GasForTxBody / _transaction.gasPerPubdataByteLimit); + } // Ensuring that the transaction covers the minimal costs for its processing: // hashing its content, publishing the factory dependencies, etc. 
- require( + if ( getMinimalPriorityTransactionGasLimit( _encoded.length, _transaction.factoryDeps.length, _transaction.gasPerPubdataByteLimit - ) <= l2GasForTxBody, - "up" - ); + ) > l2GasForTxBody + ) { + revert ValidateTxnNotEnoughGas(); + } } /// @dev Used to validate upgrade transactions /// @param _transaction The transaction to validate function validateUpgradeTransaction(L2CanonicalTransaction memory _transaction) internal pure { // Restrict from to be within system contract range (0...2^16 - 1) - require(_transaction.from <= type(uint16).max, "ua"); - require(_transaction.to <= type(uint160).max, "ub"); - require(_transaction.paymaster == 0, "uc"); - require(_transaction.value == 0, "ud"); - require(_transaction.maxFeePerGas == 0, "uq"); - require(_transaction.maxPriorityFeePerGas == 0, "ux"); - require(_transaction.reserved[0] == 0, "ue"); - require(_transaction.reserved[1] <= type(uint160).max, "uf"); - require(_transaction.reserved[2] == 0, "ug"); - require(_transaction.reserved[3] == 0, "uo"); - require(_transaction.signature.length == 0, "uh"); - require(_transaction.paymasterInput.length == 0, "ul1"); - require(_transaction.reservedDynamic.length == 0, "um"); + if (_transaction.from > type(uint16).max) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.From); + } + if (_transaction.to > type(uint160).max) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.To); + } + if (_transaction.paymaster != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.Paymaster); + } + if (_transaction.value != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.Value); + } + if (_transaction.maxFeePerGas != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.MaxFeePerGas); + } + if (_transaction.maxPriorityFeePerGas != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.MaxPriorityFeePerGas); + } + if (_transaction.reserved[0] != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.Reserved0); + } + if (_transaction.reserved[1] > type(uint160).max) { + revert 
InvalidUpgradeTxn(UpgradeTxVerifyParam.Reserved1); + } + if (_transaction.reserved[2] != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.Reserved2); + } + if (_transaction.reserved[3] != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.Reserved3); + } + if (_transaction.signature.length != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.Signature); + } + if (_transaction.paymasterInput.length != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.PaymasterInput); + } + if (_transaction.reservedDynamic.length != 0) { + revert InvalidUpgradeTxn(UpgradeTxVerifyParam.ReservedDynamic); + } } /// @dev Calculates the approximate minimum gas limit required for executing a priority transaction. @@ -112,7 +144,10 @@ library TransactionValidator { ) internal pure returns (uint256 txBodyGasLimit) { uint256 overhead = getOverheadForTransaction(_encodingLength); - require(_totalGasLimit >= overhead, "my"); // provided gas limit doesn't cover transaction overhead + // provided gas limit doesn't cover transaction overhead + if (_totalGasLimit < overhead) { + revert TxnBodyGasLimitNotEnoughGas(); + } unchecked { // We enforce the fact that `_totalGasLimit >= overhead` explicitly above. txBodyGasLimit = _totalGasLimit - overhead; @@ -122,7 +157,7 @@ library TransactionValidator { /// @notice Based on the total L2 gas limit and several other parameters of the transaction /// returns the part of the L2 gas that will be spent on the batch's overhead. /// @dev The details of how this function works can be checked in the documentation - /// of the fee model of zkSync. The appropriate comments are also present + /// of the fee model of ZKsync. The appropriate comments are also present /// in the Rust implementation description of function `get_maximal_allowed_overhead`. 
/// @param _encodingLength The length of the binary encoding of the transaction in bytes function getOverheadForTransaction( diff --git a/l1-contracts/contracts/transactionFilterer/GatewayTransactionFilterer.sol b/l1-contracts/contracts/transactionFilterer/GatewayTransactionFilterer.sol new file mode 100644 index 000000000..ebcb37ff8 --- /dev/null +++ b/l1-contracts/contracts/transactionFilterer/GatewayTransactionFilterer.sol @@ -0,0 +1,110 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; + +import {ReentrancyGuard} from "../common/ReentrancyGuard.sol"; +import {AlreadyWhitelisted, InvalidSelector, NotWhitelisted, ZeroAddress} from "../common/L1ContractErrors.sol"; +import {ITransactionFilterer} from "../state-transition/chain-interfaces/ITransactionFilterer.sol"; +import {IBridgehub} from "../bridgehub/IBridgehub.sol"; +import {IL2Bridge} from "../bridge/interfaces/IL2Bridge.sol"; +import {IAssetRouterBase} from "../bridge/asset-router/IAssetRouterBase.sol"; +import {IL2AssetRouter} from "../bridge/asset-router/IL2AssetRouter.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @dev Filters transactions received by the Mailbox +/// @dev Only allows whitelisted senders to deposit to Gateway +contract GatewayTransactionFilterer is ITransactionFilterer, ReentrancyGuard, Ownable2StepUpgradeable { + /// @notice Event emitted when sender is whitelisted + event WhitelistGranted(address indexed sender); + + /// @notice Event emitted when sender is removed from whitelist + event WhitelistRevoked(address indexed sender); + + /// @notice The ecosystem's Bridgehub + IBridgehub public immutable BRIDGE_HUB; + + /// @notice The L1 asset router + address public immutable L1_ASSET_ROUTER; + + /// @notice Indicates whether the sender is whitelisted to deposit to Gateway + mapping(address sender => bool 
whitelisted) public whitelistedSenders; + + /// @dev Contract is expected to be used as proxy implementation. + /// @dev Initialize the implementation to prevent Parity hack. + constructor(IBridgehub _bridgeHub, address _assetRouter) reentrancyGuardInitializer { + BRIDGE_HUB = _bridgeHub; + L1_ASSET_ROUTER = _assetRouter; + _disableInitializers(); + } + + /// @notice Initializes a contract filterer for later use. Expected to be used in the proxy. + /// @param _owner The address which can upgrade the implementation. + function initialize(address _owner) external reentrancyGuardInitializer initializer { + if (_owner == address(0)) { + revert ZeroAddress(); + } + _transferOwnership(_owner); + } + + /// @notice Whitelists the sender. + /// @param sender Address of the tx sender. + function grantWhitelist(address sender) external onlyOwner { + if (whitelistedSenders[sender]) { + revert AlreadyWhitelisted(sender); + } + whitelistedSenders[sender] = true; + emit WhitelistGranted(sender); + } + + /// @notice Revoke the sender from whitelist. + /// @param sender Address of the tx sender. 
+ function revokeWhitelist(address sender) external onlyOwner { + if (!whitelistedSenders[sender]) { + revert NotWhitelisted(sender); + } + whitelistedSenders[sender] = false; + emit WhitelistRevoked(sender); + } + + /// @notice Checks if the transaction is allowed + /// @param sender The sender of the transaction + /// @param l2Calldata The calldata of the L2 transaction + /// @return Whether the transaction is allowed + function isTransactionAllowed( + address sender, + address, + uint256, + uint256, + bytes calldata l2Calldata, + address + ) external view returns (bool) { + if (sender == L1_ASSET_ROUTER) { + bytes4 l2TxSelector = bytes4(l2Calldata[:4]); + + if (IL2AssetRouter.setAssetHandlerAddress.selector == l2TxSelector) { + (, bytes32 decodedAssetId, ) = abi.decode(l2Calldata[4:], (uint256, bytes32, address)); + return _checkSTMAssetId(decodedAssetId); + } + + if ( + IAssetRouterBase.finalizeDeposit.selector != l2TxSelector && + IL2Bridge.finalizeDeposit.selector != l2TxSelector + ) { + revert InvalidSelector(l2TxSelector); + } + + (, bytes32 decodedAssetId, ) = abi.decode(l2Calldata[4:], (uint256, bytes32, bytes)); + return _checkSTMAssetId(decodedAssetId); + } + + return whitelistedSenders[sender]; + } + + function _checkSTMAssetId(bytes32 assetId) internal view returns (bool) { + address stmAddress = BRIDGE_HUB.ctmAssetIdToAddress(assetId); + return stmAddress != address(0); + } +} diff --git a/l1-contracts/contracts/upgrades/BaseZkSyncUpgrade.sol b/l1-contracts/contracts/upgrades/BaseZkSyncUpgrade.sol index 72c02f277..be10e146d 100644 --- a/l1-contracts/contracts/upgrades/BaseZkSyncUpgrade.sol +++ b/l1-contracts/contracts/upgrades/BaseZkSyncUpgrade.sol @@ -2,15 +2,16 @@ pragma solidity 0.8.24; -import {SafeCast} from "@openzeppelin/contracts/utils/math/SafeCast.sol"; +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; -import {ZkSyncHyperchainBase} from "../state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol"; +import 
{ZKChainBase} from "../state-transition/chain-deps/facets/ZKChainBase.sol"; import {VerifierParams} from "../state-transition/chain-interfaces/IVerifier.sol"; import {IVerifier} from "../state-transition/chain-interfaces/IVerifier.sol"; import {L2ContractHelper} from "../common/libraries/L2ContractHelper.sol"; import {TransactionValidator} from "../state-transition/libraries/TransactionValidator.sol"; import {MAX_NEW_FACTORY_DEPS, SYSTEM_UPGRADE_L2_TX_TYPE, MAX_ALLOWED_MINOR_VERSION_DELTA} from "../common/Config.sol"; import {L2CanonicalTransaction} from "../common/Messaging.sol"; +import {ProtocolVersionMinorDeltaTooBig, TimeNotReached, InvalidTxType, L2UpgradeNonceNotEqualToNewProtocolVersion, TooManyFactoryDeps, UnexpectedNumberOfFactoryDeps, ProtocolVersionTooSmall, PreviousUpgradeNotFinalized, PreviousUpgradeNotCleaned, L2BytecodeHashMismatch, PatchCantSetUpgradeTxn, PreviousProtocolMajorVersionNotZero, NewProtocolMajorVersionNotZero, PatchUpgradeCantSetDefaultAccount, PatchUpgradeCantSetBootloader} from "./ZkSyncUpgradeErrors.sol"; import {SemVer} from "../common/libraries/SemVer.sol"; /// @notice The struct that represents the upgrade proposal. 
@@ -32,7 +33,10 @@ struct ProposedUpgrade { bytes[] factoryDeps; bytes32 bootloaderHash; bytes32 defaultAccountHash; - address verifier; + address dualVerifier; + address plonkVerifier; + address fflonkVerifier; + uint256 fflonkProofLength; VerifierParams verifierParams; bytes l1ContractsUpgradeCalldata; bytes postUpgradeCalldata; @@ -43,7 +47,7 @@ struct ProposedUpgrade { /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev /// @notice Interface to which all the upgrade implementations should adhere -abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { +abstract contract BaseZkSyncUpgrade is ZKChainBase { /// @notice Changes the protocol version event NewProtocolVersion(uint256 indexed previousProtocolVersion, uint256 indexed newProtocolVersion); @@ -53,8 +57,17 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { /// @notice Сhanges to the bytecode that is used in L2 as a default account event NewL2DefaultAccountBytecodeHash(bytes32 indexed previousBytecodeHash, bytes32 indexed newBytecodeHash); - /// @notice Verifier address changed - event NewVerifier(address indexed oldVerifier, address indexed newVerifier); + /// @notice DualVerifier address changed + event NewDualVerifier(address indexed oldDualVerifier, address indexed newDualVerifier); + + /// @notice PlonkVerifier address changed + event NewPlonkVerifier(address indexed oldPlonkVerifier, address indexed newPlonkVerifier); + + /// @notice FflonkVerifier address changed + event NewFflonkVerifier(address indexed oldFflonkVerifier, address indexed newFflonkVerifier); + + /// @notice Fflonk Proof length changed + event NewFflonkProofLength(uint256 indexed oldFflonkProofLength, uint256 indexed newFflonkProofLength); /// @notice Verifier parameters changed event NewVerifierParams(VerifierParams oldVerifierParams, VerifierParams newVerifierParams); @@ -71,11 +84,13 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { // of the L1 block at which the upgrade 
occurred. This means that using timestamp as a signifier of "upgraded" // on the L2 side would be inaccurate. The effects of this "back-dating" of L2 upgrade batches will be reduced // as the permitted delay window is reduced in the future. - require(block.timestamp >= _proposedUpgrade.upgradeTimestamp, "Upgrade is not ready yet"); + if (block.timestamp < _proposedUpgrade.upgradeTimestamp) { + revert TimeNotReached(_proposedUpgrade.upgradeTimestamp, block.timestamp); + } (uint32 newMinorVersion, bool isPatchOnly) = _setNewProtocolVersion(_proposedUpgrade.newProtocolVersion); _upgradeL1Contract(_proposedUpgrade.l1ContractsUpgradeCalldata); - _upgradeVerifier(_proposedUpgrade.verifier, _proposedUpgrade.verifierParams); + _upgradeVerifiers(_proposedUpgrade.dualVerifier, _proposedUpgrade.plonkVerifier, _proposedUpgrade.fflonkVerifier, _proposedUpgrade.fflonkProofLength, _proposedUpgrade.verifierParams); _setBaseSystemContracts(_proposedUpgrade.bootloaderHash, _proposedUpgrade.defaultAccountHash, isPatchOnly); txHash = _setL2SystemContractUpgrade( @@ -98,7 +113,9 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { return; } - require(!_patchOnly, "Patch only upgrade can not set new default account"); + if (_patchOnly) { + revert PatchUpgradeCantSetDefaultAccount(); + } L2ContractHelper.validateBytecodeHash(_l2DefaultAccountBytecodeHash); @@ -118,7 +135,9 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { return; } - require(!_patchOnly, "Patch only upgrade can not set new bootloader"); + if (_patchOnly) { + revert PatchUpgradeCantSetBootloader(); + } L2ContractHelper.validateBytecodeHash(_l2BootloaderBytecodeHash); @@ -130,20 +149,68 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { emit NewL2BootloaderBytecodeHash(previousBootloaderBytecodeHash, _l2BootloaderBytecodeHash); } - /// @notice Change the address of the verifier smart contract - /// @param _newVerifier Verifier smart contract address - function _setVerifier(IVerifier 
_newVerifier) private { + /// @notice Change the address of the Wrapper verifier smart contract + /// @param _newDualVerifier DualVerifier smart contract address + function _setDualVerifier(IVerifier _newDualVerifier) private { + // An upgrade to the verifier must be done carefully to ensure there aren't batches in the committed state + // during the transition. If verifier is upgraded, it will immediately be used to prove all committed batches. + // Batches committed expecting the old verifier will fail. Ensure all committed batches are finalized before the + // verifier is upgraded. + if (_newDualVerifier == IVerifier(address(0))) { + return; + } + + IVerifier oldDualVerifier = s.dualVerifier; + s.dualVerifier = _newDualVerifier; + emit NewDualVerifier(address(oldDualVerifier), address(_newDualVerifier)); + } + + /// @notice Change the address of the PLONK verifier smart contract + /// @param _newPlonkVerifier PlonkVerifier smart contract address + function _setPlonkVerifier(address _newPlonkVerifier) private { + // An upgrade to the verifier must be done carefully to ensure there aren't batches in the committed state + // during the transition. If verifier is upgraded, it will immediately be used to prove all committed batches. + // Batches committed expecting the old verifier will fail. Ensure all committed batches are finalized before the + // verifier is upgraded. + if (_newPlonkVerifier == address(0)) { + return; + } + + address oldPlonkVerifier = s.plonkVerifier; + s.plonkVerifier = _newPlonkVerifier; + emit NewPlonkVerifier(oldPlonkVerifier, _newPlonkVerifier); + } + + /// @notice Change the address of the FFLONK verifier smart contract + /// @param _newFflonkVerifier FflonkVerifier smart contract address + function _setFflonkVerifier(address _newFflonkVerifier) private { + // An upgrade to the verifier must be done carefully to ensure there aren't batches in the committed state + // during the transition. 
If verifier is upgraded, it will immediately be used to prove all committed batches. + // Batches committed expecting the old verifier will fail. Ensure all committed batches are finalized before the + // verifier is upgraded. + if (_newFflonkVerifier == address(0)) { + return; + } + + address oldFflonkVerifier = s.fflonkVerifier; + s.fflonkVerifier = _newFflonkVerifier; + emit NewFflonkVerifier(oldFflonkVerifier, _newFflonkVerifier); + } + + /// @notice Change the length of the FFLONK proof type + /// @param _newFflonkProofLength Fflonk proof length + function _setFflonkProofLength(uint256 _newFflonkProofLength) private { // An upgrade to the verifier must be done carefully to ensure there aren't batches in the committed state // during the transition. If verifier is upgraded, it will immediately be used to prove all committed batches. // Batches committed expecting the old verifier will fail. Ensure all committed batches are finalized before the // verifier is upgraded. - if (_newVerifier == IVerifier(address(0))) { + if (_newFflonkProofLength == 0) { return; } - IVerifier oldVerifier = s.verifier; - s.verifier = _newVerifier; - emit NewVerifier(address(oldVerifier), address(_newVerifier)); + uint256 oldFflonkProofLength = s.fflonkProofLength; + s.fflonkProofLength = _newFflonkProofLength; + emit NewFflonkProofLength(oldFflonkProofLength, _newFflonkProofLength); } /// @notice Change the verifier parameters @@ -167,10 +234,16 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { } /// @notice Updates the verifier and the verifier params - /// @param _newVerifier The address of the new verifier. If 0, the verifier will not be updated. + /// @param _newDualVerifier The address of the new Wrapper verifier. If 0, the verifier will not be updated. + /// @param _newPlonkVerifier The address of the new PLONK verifier. If 0, the verifier will not be updated. + /// @param _newFflonkVerifier The address of the new FFLONK verifier. 
If 0, the verifier will not be updated. + /// @param _newFflonkProofLength The length of new FFLONK proof. If 0, the length will not be updated. /// @param _verifierParams The new verifier params. If all of the fields are 0, the params will not be updated. - function _upgradeVerifier(address _newVerifier, VerifierParams calldata _verifierParams) internal { - _setVerifier(IVerifier(_newVerifier)); + function _upgradeVerifiers(address _newDualVerifier, address _newPlonkVerifier, address _newFflonkVerifier, uint256 _newFflonkProofLength, VerifierParams calldata _verifierParams) internal { + _setDualVerifier(IVerifier(_newDualVerifier)); + _setPlonkVerifier(_newPlonkVerifier); + _setFflonkVerifier(_newFflonkVerifier); + _setFflonkProofLength(_newFflonkProofLength); _setVerifierParams(_verifierParams); } @@ -202,9 +275,12 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { return bytes32(0); } - require(!_patchOnly, "Patch only upgrade can not set upgrade transaction"); - - require(_l2ProtocolUpgradeTx.txType == SYSTEM_UPGRADE_L2_TX_TYPE, "L2 system upgrade tx type is wrong"); + if (_l2ProtocolUpgradeTx.txType != SYSTEM_UPGRADE_L2_TX_TYPE) { + revert InvalidTxType(_l2ProtocolUpgradeTx.txType); + } + if (_patchOnly) { + revert PatchCantSetUpgradeTxn(); + } bytes memory encodedTransaction = abi.encode(_l2ProtocolUpgradeTx); @@ -219,10 +295,9 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { // We want the hashes of l2 system upgrade transactions to be unique. // This is why we require that the `nonce` field is unique to each upgrade. 
- require( - _l2ProtocolUpgradeTx.nonce == _newMinorProtocolVersion, - "The new protocol version should be included in the L2 system upgrade tx" - ); + if (_l2ProtocolUpgradeTx.nonce != _newMinorProtocolVersion) { + revert L2UpgradeNonceNotEqualToNewProtocolVersion(_l2ProtocolUpgradeTx.nonce, _newMinorProtocolVersion); + } _verifyFactoryDeps(_factoryDeps, _l2ProtocolUpgradeTx.factoryDeps); @@ -237,14 +312,19 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { /// @param _factoryDeps The list of factory deps /// @param _expectedHashes The list of expected bytecode hashes function _verifyFactoryDeps(bytes[] calldata _factoryDeps, uint256[] calldata _expectedHashes) private pure { - require(_factoryDeps.length == _expectedHashes.length, "Wrong number of factory deps"); - require(_factoryDeps.length <= MAX_NEW_FACTORY_DEPS, "Factory deps can be at most 32"); - - for (uint256 i = 0; i < _factoryDeps.length; ++i) { - require( - L2ContractHelper.hashL2Bytecode(_factoryDeps[i]) == bytes32(_expectedHashes[i]), - "Wrong factory dep hash" - ); + if (_factoryDeps.length != _expectedHashes.length) { + revert UnexpectedNumberOfFactoryDeps(); + } + if (_factoryDeps.length > MAX_NEW_FACTORY_DEPS) { + revert TooManyFactoryDeps(); + } + uint256 length = _factoryDeps.length; + + for (uint256 i = 0; i < length; ++i) { + bytes32 bytecodeHash = L2ContractHelper.hashL2Bytecode(_factoryDeps[i]); + if (bytecodeHash != bytes32(_expectedHashes[i])) { + revert L2BytecodeHashMismatch(bytecodeHash, bytes32(_expectedHashes[i])); + } } } @@ -254,20 +334,23 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { uint256 _newProtocolVersion ) internal virtual returns (uint32 newMinorVersion, bool patchOnly) { uint256 previousProtocolVersion = s.protocolVersion; - require( - _newProtocolVersion > previousProtocolVersion, - "New protocol version is not greater than the current one" - ); + if (_newProtocolVersion <= previousProtocolVersion) { + revert ProtocolVersionTooSmall(); + 
} // slither-disable-next-line unused-return (uint32 previousMajorVersion, uint32 previousMinorVersion, ) = SemVer.unpackSemVer( SafeCast.toUint96(previousProtocolVersion) ); - require(previousMajorVersion == 0, "Implementation requires that the major version is 0 at all times"); + if (previousMajorVersion != 0) { + revert PreviousProtocolMajorVersionNotZero(); + } uint32 newMajorVersion; // slither-disable-next-line unused-return (newMajorVersion, newMinorVersion, ) = SemVer.unpackSemVer(SafeCast.toUint96(_newProtocolVersion)); - require(newMajorVersion == 0, "Major must always be 0"); + if (newMajorVersion != 0) { + revert NewProtocolMajorVersionNotZero(); + } // Since `_newProtocolVersion > previousProtocolVersion`, and both old and new major version is 0, // the difference between minor versions is >= 0. @@ -278,19 +361,22 @@ abstract contract BaseZkSyncUpgrade is ZkSyncHyperchainBase { } // While this is implicitly enforced by other checks above, we still double check just in case - require(minorDelta <= MAX_ALLOWED_MINOR_VERSION_DELTA, "Too big protocol version difference"); + if (minorDelta > MAX_ALLOWED_MINOR_VERSION_DELTA) { + revert ProtocolVersionMinorDeltaTooBig(MAX_ALLOWED_MINOR_VERSION_DELTA, minorDelta); + } // If the minor version changes also, we need to ensure that the previous upgrade has been finalized. // In case the minor version does not change, we permit to keep the old upgrade transaction in the system, but it - // must be ensured in the other parts of the upgrade that the is not overridden. + // must be ensured in the other parts of the upgrade that the upgrade transaction is not overridden. if (!patchOnly) { // If the previous upgrade had an L2 system upgrade transaction, we require that it is finalized. 
- // Note it is important to keep this check, as otherwise hyperchains might skip upgrades by overwriting - require(s.l2SystemContractsUpgradeTxHash == bytes32(0), "Previous upgrade has not been finalized"); - require( - s.l2SystemContractsUpgradeBatchNumber == 0, - "The batch number of the previous upgrade has not been cleaned" - ); + // Note it is important to keep this check, as otherwise ZK chains might skip upgrades by overwriting + if (s.l2SystemContractsUpgradeTxHash != bytes32(0)) { + revert PreviousUpgradeNotFinalized(s.l2SystemContractsUpgradeTxHash); + } + if (s.l2SystemContractsUpgradeBatchNumber != 0) { + revert PreviousUpgradeNotCleaned(); + } } s.protocolVersion = _newProtocolVersion; diff --git a/l1-contracts/contracts/upgrades/BaseZkSyncUpgradeGenesis.sol b/l1-contracts/contracts/upgrades/BaseZkSyncUpgradeGenesis.sol index 7cf9b5ceb..49237ccfd 100644 --- a/l1-contracts/contracts/upgrades/BaseZkSyncUpgradeGenesis.sol +++ b/l1-contracts/contracts/upgrades/BaseZkSyncUpgradeGenesis.sol @@ -2,9 +2,10 @@ pragma solidity 0.8.24; -import {SafeCast} from "@openzeppelin/contracts/utils/math/SafeCast.sol"; +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; import {BaseZkSyncUpgrade} from "./BaseZkSyncUpgrade.sol"; +import {ProtocolVersionTooSmall, ProtocolVersionDeltaTooLarge, PreviousUpgradeNotFinalized, PreviousUpgradeBatchNotCleared, ProtocolMajorVersionNotZero} from "./ZkSyncUpgradeErrors.sol"; import {MAX_ALLOWED_MINOR_VERSION_DELTA} from "../common/Config.sol"; import {SemVer} from "../common/libraries/SemVer.sol"; @@ -18,21 +19,27 @@ abstract contract BaseZkSyncUpgradeGenesis is BaseZkSyncUpgrade { uint256 _newProtocolVersion ) internal override returns (uint32 newMinorVersion, bool patchOnly) { uint256 previousProtocolVersion = s.protocolVersion; - // IMPORTANT Genesis Upgrade difference: Note this is the only thing change > to >= - require( - _newProtocolVersion >= previousProtocolVersion, - "New protocol version is not 
greater than the current one" - ); + if ( + // IMPORTANT Genesis Upgrade difference: Note this is the only thing change <= to < + _newProtocolVersion < previousProtocolVersion + ) { + revert ProtocolVersionTooSmall(); + } // slither-disable-next-line unused-return (uint32 previousMajorVersion, uint32 previousMinorVersion, ) = SemVer.unpackSemVer( SafeCast.toUint96(previousProtocolVersion) ); - require(previousMajorVersion == 0, "Implementation requires that the major version is 0 at all times"); + + if (previousMajorVersion != 0) { + revert ProtocolMajorVersionNotZero(); + } uint32 newMajorVersion; // slither-disable-next-line unused-return (newMajorVersion, newMinorVersion, ) = SemVer.unpackSemVer(SafeCast.toUint96(_newProtocolVersion)); - require(newMajorVersion == 0, "Major must always be 0"); + if (newMajorVersion != 0) { + revert ProtocolMajorVersionNotZero(); + } // Since `_newProtocolVersion > previousProtocolVersion`, and both old and new major version is 0, // the difference between minor versions is >= 0. @@ -42,19 +49,22 @@ abstract contract BaseZkSyncUpgradeGenesis is BaseZkSyncUpgrade { patchOnly = false; // While this is implicitly enforced by other checks above, we still double check just in case - require(minorDelta <= MAX_ALLOWED_MINOR_VERSION_DELTA, "Too big protocol version difference"); + if (minorDelta > MAX_ALLOWED_MINOR_VERSION_DELTA) { + revert ProtocolVersionDeltaTooLarge(minorDelta, MAX_ALLOWED_MINOR_VERSION_DELTA); + } // If the minor version changes also, we need to ensure that the previous upgrade has been finalized. // In case the minor version does not change, we permit to keep the old upgrade transaction in the system, but it - // must be ensured in the other parts of the upgrade that the is not overridden. + // must be ensured in the other parts of the upgrade that the upgrade transaction is not overridden. if (!patchOnly) { // If the previous upgrade had an L2 system upgrade transaction, we require that it is finalized. 
- // Note it is important to keep this check, as otherwise hyperchains might skip upgrades by overwriting - require(s.l2SystemContractsUpgradeTxHash == bytes32(0), "Previous upgrade has not been finalized"); - require( - s.l2SystemContractsUpgradeBatchNumber == 0, - "The batch number of the previous upgrade has not been cleaned" - ); + // Note it is important to keep this check, as otherwise ZK chains might skip upgrades by overwriting + if (s.l2SystemContractsUpgradeTxHash != bytes32(0)) { + revert PreviousUpgradeNotFinalized(s.l2SystemContractsUpgradeTxHash); + } + if (s.l2SystemContractsUpgradeBatchNumber != 0) { + revert PreviousUpgradeBatchNotCleared(); + } } s.protocolVersion = _newProtocolVersion; diff --git a/l1-contracts/contracts/upgrades/GatewayHelper.sol b/l1-contracts/contracts/upgrades/GatewayHelper.sol new file mode 100644 index 000000000..5ae02b9a0 --- /dev/null +++ b/l1-contracts/contracts/upgrades/GatewayHelper.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IL1SharedBridgeLegacy} from "../bridge/interfaces/IL1SharedBridgeLegacy.sol"; +import {IBridgehub} from "../bridgehub/IBridgehub.sol"; + +import {ZKChainSpecificForceDeploymentsData} from "../state-transition/l2-deps/IL2GenesisUpgrade.sol"; + +import {ZKChainStorage} from "../state-transition/chain-deps/ZKChainStorage.sol"; + +library GatewayHelper { + function getZKChainSpecificForceDeploymentsData(ZKChainStorage storage s) internal view returns (bytes memory) { + address sharedBridge = IBridgehub(s.bridgehub).sharedBridge(); + address legacySharedBridge = IL1SharedBridgeLegacy(sharedBridge).l2BridgeAddress(s.chainId); + ZKChainSpecificForceDeploymentsData + memory additionalForceDeploymentsData = ZKChainSpecificForceDeploymentsData({ + baseTokenAssetId: s.baseTokenAssetId, + l2LegacySharedBridge: legacySharedBridge, + l2Weth: address(0) // kl todo + }); + return abi.encode(additionalForceDeploymentsData); + } +} diff --git 
a/l1-contracts/contracts/upgrades/GatewayUpgrade.sol b/l1-contracts/contracts/upgrades/GatewayUpgrade.sol new file mode 100644 index 000000000..3420d81ae --- /dev/null +++ b/l1-contracts/contracts/upgrades/GatewayUpgrade.sol @@ -0,0 +1,88 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {BaseZkSyncUpgrade, ProposedUpgrade} from "./BaseZkSyncUpgrade.sol"; + +import {DataEncoding} from "../common/libraries/DataEncoding.sol"; + +import {Diamond} from "../state-transition/libraries/Diamond.sol"; +import {PriorityQueue} from "../state-transition/libraries/PriorityQueue.sol"; +import {PriorityTree} from "../state-transition/libraries/PriorityTree.sol"; + +import {IGatewayUpgrade} from "./IGatewayUpgrade.sol"; +import {IComplexUpgrader} from "../state-transition/l2-deps/IComplexUpgrader.sol"; +import {IL2GatewayUpgrade} from "../state-transition/l2-deps/IL2GatewayUpgrade.sol"; + +import {IL2ContractDeployer} from "../common/interfaces/IL2ContractDeployer.sol"; + +import {GatewayHelper} from "./GatewayHelper.sol"; + +// solhint-disable-next-line gas-struct-packing +struct GatewayUpgradeEncodedInput { + IL2ContractDeployer.ForceDeployment[] baseForceDeployments; + bytes fixedForceDeploymentsData; + address ctmDeployer; + address l2GatewayUpgrade; + address oldValidatorTimelock; + address newValidatorTimelock; +} + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice This upgrade will be used to migrate Era to be part of the ZK chain ecosystem contracts. +contract GatewayUpgrade is BaseZkSyncUpgrade { + using PriorityQueue for PriorityQueue.Queue; + using PriorityTree for PriorityTree.Tree; + + address public immutable THIS_ADDRESS; + + constructor() { + THIS_ADDRESS = address(this); + } + + /// @notice The main function that will be called by the upgrade proxy. + /// @param _proposedUpgrade The upgrade to be executed. 
+ function upgrade(ProposedUpgrade calldata _proposedUpgrade) public override returns (bytes32) { + GatewayUpgradeEncodedInput memory encodedInput = abi.decode( + _proposedUpgrade.postUpgradeCalldata, + (GatewayUpgradeEncodedInput) + ); + + bytes32 baseTokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, s.__DEPRECATED_baseToken); + + s.baseTokenAssetId = baseTokenAssetId; + s.priorityTree.setup(s.priorityQueue.getTotalPriorityTxs()); + s.validators[encodedInput.oldValidatorTimelock] = false; + s.validators[encodedInput.newValidatorTimelock] = true; + ProposedUpgrade memory proposedUpgrade = _proposedUpgrade; + + bytes memory gatewayUpgradeCalldata = abi.encodeCall( + IL2GatewayUpgrade.upgrade, + ( + encodedInput.baseForceDeployments, + encodedInput.ctmDeployer, + encodedInput.fixedForceDeploymentsData, + GatewayHelper.getZKChainSpecificForceDeploymentsData(s) + ) + ); + + proposedUpgrade.l2ProtocolUpgradeTx.data = abi.encodeCall( + IComplexUpgrader.upgrade, + (encodedInput.l2GatewayUpgrade, gatewayUpgradeCalldata) + ); + + // slither-disable-next-line controlled-delegatecall + (bool success, ) = THIS_ADDRESS.delegatecall( + abi.encodeWithSelector(IGatewayUpgrade.upgradeExternal.selector, proposedUpgrade) + ); + // solhint-disable-next-line gas-custom-errors + require(success, "GatewayUpgrade: upgrade failed"); + return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE; + } + + /// @notice The function that will be called from this same contract, we need an external call to be able to modify _proposedUpgrade (memory/calldata). 
+ function upgradeExternal(ProposedUpgrade calldata _proposedUpgrade) external { + super.upgrade(_proposedUpgrade); + } +} diff --git a/l1-contracts/contracts/upgrades/GenesisUpgrade.sol b/l1-contracts/contracts/upgrades/GenesisUpgrade.sol deleted file mode 100644 index 5e0ee280a..000000000 --- a/l1-contracts/contracts/upgrades/GenesisUpgrade.sol +++ /dev/null @@ -1,18 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {Diamond} from "../state-transition/libraries/Diamond.sol"; -import {BaseZkSyncUpgradeGenesis} from "./BaseZkSyncUpgradeGenesis.sol"; -import {ProposedUpgrade} from "./IDefaultUpgrade.sol"; - -/// @author Matter Labs -/// @custom:security-contact security@matterlabs.dev -contract GenesisUpgrade is BaseZkSyncUpgradeGenesis { - /// @notice The main function that will be called by the upgrade proxy. - /// @param _proposedUpgrade The upgrade to be executed. - function upgrade(ProposedUpgrade calldata _proposedUpgrade) public override returns (bytes32) { - super.upgrade(_proposedUpgrade); - return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE; - } -} diff --git a/l1-contracts/contracts/upgrades/IGatewayUpgrade.sol b/l1-contracts/contracts/upgrades/IGatewayUpgrade.sol new file mode 100644 index 000000000..eaa74c75b --- /dev/null +++ b/l1-contracts/contracts/upgrades/IGatewayUpgrade.sol @@ -0,0 +1,9 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {ProposedUpgrade} from "./BaseZkSyncUpgrade.sol"; + +interface IGatewayUpgrade { + function upgradeExternal(ProposedUpgrade calldata _upgrade) external returns (bytes32); +} diff --git a/l1-contracts/contracts/upgrades/IL1GenesisUpgrade.sol b/l1-contracts/contracts/upgrades/IL1GenesisUpgrade.sol new file mode 100644 index 000000000..57dd40131 --- /dev/null +++ b/l1-contracts/contracts/upgrades/IL1GenesisUpgrade.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {L2CanonicalTransaction} from "../common/Messaging.sol"; + 
+interface IL1GenesisUpgrade { + /// @dev emitted when a chain registers and a GenesisUpgrade happens + event GenesisUpgrade( + address indexed _zkChain, + L2CanonicalTransaction _l2Transaction, + uint256 indexed _protocolVersion, + bytes[] _factoryDeps + ); + + function genesisUpgrade( + address _l1GenesisUpgrade, + uint256 _chainId, + uint256 _protocolVersion, + address _l1CtmDeployerAddress, + bytes calldata _forceDeployments, + bytes[] calldata _factoryDeps + ) external returns (bytes32); +} diff --git a/l1-contracts/contracts/upgrades/L1GenesisUpgrade.sol b/l1-contracts/contracts/upgrades/L1GenesisUpgrade.sol new file mode 100644 index 000000000..eea4ef4b7 --- /dev/null +++ b/l1-contracts/contracts/upgrades/L1GenesisUpgrade.sol @@ -0,0 +1,125 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; + +import {Diamond} from "../state-transition/libraries/Diamond.sol"; +import {BaseZkSyncUpgradeGenesis} from "./BaseZkSyncUpgradeGenesis.sol"; +import {ProposedUpgrade} from "./IDefaultUpgrade.sol"; +import {L2CanonicalTransaction} from "../common/Messaging.sol"; +import {IL2GenesisUpgrade} from "../state-transition/l2-deps/IL2GenesisUpgrade.sol"; +import {IL1GenesisUpgrade} from "./IL1GenesisUpgrade.sol"; +import {IComplexUpgrader} from "../state-transition/l2-deps/IComplexUpgrader.sol"; +import {L2_FORCE_DEPLOYER_ADDR, L2_COMPLEX_UPGRADER_ADDR, L2_GENESIS_UPGRADE_ADDR} from "../common/L2ContractAddresses.sol"; //, COMPLEX_UPGRADER_ADDR, GENESIS_UPGRADE_ADDR +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, SYSTEM_UPGRADE_L2_TX_TYPE, PRIORITY_TX_MAX_GAS_LIMIT} from "../common/Config.sol"; +import {SemVer} from "../common/libraries/SemVer.sol"; + +import {IL1SharedBridgeLegacy} from "../bridge/interfaces/IL1SharedBridgeLegacy.sol"; +import {IBridgehub} from "../bridgehub/IBridgehub.sol"; + +import {ZKChainSpecificForceDeploymentsData} from 
"../state-transition/l2-deps/IL2GenesisUpgrade.sol"; + +import {VerifierParams} from "../state-transition/chain-interfaces/IVerifier.sol"; +import {L2ContractHelper} from "../common/libraries/L2ContractHelper.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +contract L1GenesisUpgrade is IL1GenesisUpgrade, BaseZkSyncUpgradeGenesis { + /// @notice The main function that will be called by the upgrade proxy. + function genesisUpgrade( + address _l1GenesisUpgrade, + uint256 _chainId, + uint256 _protocolVersion, + address _l1CtmDeployerAddress, + bytes calldata _fixedForceDeploymentsData, + bytes[] calldata _factoryDeps + ) public override returns (bytes32) { + L2CanonicalTransaction memory l2ProtocolUpgradeTx; + + { + bytes memory complexUpgraderCalldata; + { + bytes memory additionalForceDeploymentsData = _getZKChainSpecificForceDeploymentsData(); + bytes memory l2GenesisUpgradeCalldata = abi.encodeCall( + IL2GenesisUpgrade.genesisUpgrade, + (_chainId, _l1CtmDeployerAddress, _fixedForceDeploymentsData, additionalForceDeploymentsData) + ); + complexUpgraderCalldata = abi.encodeCall( + IComplexUpgrader.upgrade, + (L2_GENESIS_UPGRADE_ADDR, l2GenesisUpgradeCalldata) + ); + } + + // slither-disable-next-line unused-return + (, uint32 minorVersion, ) = SemVer.unpackSemVer(SafeCast.toUint96(_protocolVersion)); + l2ProtocolUpgradeTx = L2CanonicalTransaction({ + txType: SYSTEM_UPGRADE_L2_TX_TYPE, + from: uint256(uint160(L2_FORCE_DEPLOYER_ADDR)), + to: uint256(uint160(L2_COMPLEX_UPGRADER_ADDR)), + gasLimit: PRIORITY_TX_MAX_GAS_LIMIT, + gasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + maxFeePerGas: uint256(0), + maxPriorityFeePerGas: uint256(0), + paymaster: uint256(0), + // Note, that the protocol version is used as "nonce" for system upgrade transactions + nonce: minorVersion, + value: 0, + reserved: [uint256(0), 0, 0, 0], + data: complexUpgraderCalldata, + signature: new bytes(0), + factoryDeps: 
L2ContractHelper.hashFactoryDeps(_factoryDeps), + paymasterInput: new bytes(0), + reservedDynamic: new bytes(0) + }); + } + ProposedUpgrade memory proposedUpgrade = ProposedUpgrade({ + l2ProtocolUpgradeTx: l2ProtocolUpgradeTx, + factoryDeps: _factoryDeps, + bootloaderHash: bytes32(0), + defaultAccountHash: bytes32(0), + dualVerifier: address(0), + plonkVerifier: address(0), + fflonkVerifier: address(0), + fflonkProofLength: 0, + verifierParams: VerifierParams({ + recursionNodeLevelVkHash: bytes32(0), + recursionLeafLevelVkHash: bytes32(0), + recursionCircuitsSetVksHash: bytes32(0) + }), + l1ContractsUpgradeCalldata: new bytes(0), + postUpgradeCalldata: new bytes(0), + upgradeTimestamp: 0, + newProtocolVersion: _protocolVersion + }); + + Diamond.FacetCut[] memory emptyArray; + Diamond.DiamondCutData memory cutData = Diamond.DiamondCutData({ + facetCuts: emptyArray, + initAddress: _l1GenesisUpgrade, + initCalldata: abi.encodeCall(this.upgrade, (proposedUpgrade)) + }); + Diamond.diamondCut(cutData); + + emit GenesisUpgrade(address(this), l2ProtocolUpgradeTx, _protocolVersion, _factoryDeps); + return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE; + } + + /// @notice the upgrade function. 
+ function upgrade(ProposedUpgrade calldata _proposedUpgrade) public override returns (bytes32) { + super.upgrade(_proposedUpgrade); + return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE; + } + + function _getZKChainSpecificForceDeploymentsData() internal view returns (bytes memory) { + address sharedBridge = IBridgehub(s.bridgehub).sharedBridge(); + address legacySharedBridge = IL1SharedBridgeLegacy(sharedBridge).l2BridgeAddress(s.chainId); + ZKChainSpecificForceDeploymentsData + memory additionalForceDeploymentsData = ZKChainSpecificForceDeploymentsData({ + baseTokenAssetId: s.baseTokenAssetId, + l2LegacySharedBridge: legacySharedBridge, + l2Weth: address(0) // kl todo + }); + return abi.encode(additionalForceDeploymentsData); + } +} diff --git a/l1-contracts/contracts/upgrades/ZkSyncUpgradeErrors.sol b/l1-contracts/contracts/upgrades/ZkSyncUpgradeErrors.sol new file mode 100644 index 000000000..b30c882e7 --- /dev/null +++ b/l1-contracts/contracts/upgrades/ZkSyncUpgradeErrors.sol @@ -0,0 +1,48 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.21; + +// 0x7a47c9a2 +error InvalidChainId(); +// 0xd7f8c13e +error PreviousUpgradeBatchNotCleared(); +// 0x3c43ccce +error ProtocolMajorVersionNotZero(); +// 0xd7f50a9d +error PatchCantSetUpgradeTxn(); +// 0xd2c011d6 +error L2UpgradeNonceNotEqualToNewProtocolVersion(uint256 nonce, uint256 protocolVersion); +// 0xcb5e4247 +error L2BytecodeHashMismatch(bytes32 expected, bytes32 provided); +// 0x88d7b498 +error ProtocolVersionTooSmall(); +// 0x56d45b12 +error ProtocolVersionTooBig(); +// 0x5c598b60 +error PreviousProtocolMajorVersionNotZero(); +// 0x72ea85ad +error NewProtocolMajorVersionNotZero(); +// 0xd328c12a +error ProtocolVersionMinorDeltaTooBig(uint256 limit, uint256 proposed); +// 0xe1a9736b +error ProtocolVersionDeltaTooLarge(uint256 _proposedDelta, uint256 _maxDelta); +// 0x6d172ab2 +error ProtocolVersionShouldBeGreater(uint256 _oldProtocolVersion, uint256 _newProtocolVersion); +// 0x559cc34e +error 
PatchUpgradeCantSetDefaultAccount(); +// 0x962fd7d0 +error PatchUpgradeCantSetBootloader(); +// 0x101ba748 +error PreviousUpgradeNotFinalized(bytes32 txHash); +// 0xa0f47245 +error PreviousUpgradeNotCleaned(); +// 0x07218375 +error UnexpectedNumberOfFactoryDeps(); +// 0x76da24b9 +error TooManyFactoryDeps(); +// 0x5cb29523 +error InvalidTxType(uint256 txType); +// 0x08753982 +error TimeNotReached(uint256 expectedTimestamp, uint256 actualTimestamp); +// 0xd92e233d +error ZeroAddress(); diff --git a/l1-contracts/contracts/vendor/AddressAliasHelper.sol b/l1-contracts/contracts/vendor/AddressAliasHelper.sol index 19f3b2d79..b604e9d24 100644 --- a/l1-contracts/contracts/vendor/AddressAliasHelper.sol +++ b/l1-contracts/contracts/vendor/AddressAliasHelper.sol @@ -15,11 +15,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -pragma solidity 0.8.24; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.21; library AddressAliasHelper { - uint160 constant offset = uint160(0x1111000000000000000000000000000000001111); + uint160 private constant offset = uint160(0x1111000000000000000000000000000000001111); /// @notice Utility function converts the address that submitted a tx /// to the inbox on L1 to the msg.sender viewed on L2 @@ -43,18 +43,20 @@ library AddressAliasHelper { /// @notice Utility function used to calculate the correct refund recipient /// @param _refundRecipient the address that should receive the refund - /// @param _prevMsgSender the address that triggered the tx to L2 + /// @param _originalCaller the address that triggered the tx to L2 /// @return _recipient the corrected address that should receive the refund function actualRefundRecipient( address _refundRecipient, - address _prevMsgSender + address _originalCaller ) internal view returns (address _recipient) { if (_refundRecipient == address(0)) { - // If the `_refundRecipient` is not provided, we use the `_prevMsgSender` as the recipient. + // If the `_refundRecipient` is not provided, we use the `_originalCaller` as the recipient. + // solhint-disable avoid-tx-origin // slither-disable-next-line tx-origin - _recipient = _prevMsgSender == tx.origin - ? _prevMsgSender - : AddressAliasHelper.applyL1ToL2Alias(_prevMsgSender); + _recipient = _originalCaller == tx.origin + ? _originalCaller + : AddressAliasHelper.applyL1ToL2Alias(_originalCaller); + // solhint-enable avoid-tx-origin } else if (_refundRecipient.code.length > 0) { // If the `_refundRecipient` is a smart contract, we apply the L1 to L2 alias to prevent foot guns. 
_recipient = AddressAliasHelper.applyL1ToL2Alias(_refundRecipient); diff --git a/l1-contracts/deploy-script-config-template/config-deploy-l1.toml b/l1-contracts/deploy-script-config-template/config-deploy-l1.toml index ad8982ffc..4429fbf12 100644 --- a/l1-contracts/deploy-script-config-template/config-deploy-l1.toml +++ b/l1-contracts/deploy-script-config-template/config-deploy-l1.toml @@ -25,6 +25,7 @@ diamond_init_priority_tx_max_pubdata = 99000 diamond_init_minimal_l2_gas_price = 250000000 bootloader_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" default_aa_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +force_deployments_data = "0x" [tokens] token_weth_address = "0x0000000000000000000000000000000000000000" diff --git a/l1-contracts/deploy-script-config-template/config-prepare-registration-calldata.toml b/l1-contracts/deploy-script-config-template/config-prepare-registration-calldata.toml new file mode 100644 index 000000000..99ff2e7ce --- /dev/null +++ b/l1-contracts/deploy-script-config-template/config-prepare-registration-calldata.toml @@ -0,0 +1,12 @@ +[deployed_addresses] +state_transition_proxy_addr = "0x635853efd1d388f597feb9fe06e666efda397911" +erc20_bridge_proxy_addr = "0x147CDc5DD347bA141545Ad08fce748484Ed7fDbA" + + +[chain] +chain_id = 123 +era_chain_id = 9 +admin = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" +diamond_cut_data = 
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000060000000000000000000000000cf3cd7fa8948f7748d20f7f33e85a0320b6c5d4d0000000000000000000000000000000000000000000000000000000000000c400000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000008800000000000000000000000000000000000000000000000000000000000000a200000000000000000000000003fe9b8f276c1a2e26d2190f2d9fd1897a04eb90a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000d0e18b681000000000000000000000000000000000000000000000000000000001733894500000000000000000000000000000000000000000000000000000000fc57565f000000000000000000000000000000000000000000000000000000001cc5d1030000000000000000000000000000000000000000000000000000000021f603d700000000000000000000000000000000000000000000000000000000235d9eb50000000000000000000000000000000000000000000000000000000027ae4c16000000000000000000000000000000000000000000000000000000004623c91d000000000000000000000000000000000000000000000000000000004dd18bf50000000000000000000000000000000000000000000000000000000064bf8d6600000000000000000000000000000000000000000000000000000000a9f6d94100000000000000000000000000000000000000000000000000000000be6f11cf00000000000000000000000000000000000000000000000000000000e76db865000000000000000000000000000000000000000000000000000000000000000000000000000000000e70df06c0938e724ce23e49b3c4f01abed6bbfc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000
00000000000000000000000000000000000000000000000002906d49e5b00000000000000000000000000000000000000000000000000000000086a56f8000000000000000000000000000000000000000000000000000000000ec6b0b700000000000000000000000000000000000000000000000000000000fe26699e0000000000000000000000000000000000000000000000000000000018e3a941000000000000000000000000000000000000000000000000000000001de72e340000000000000000000000000000000000000000000000000000000029b98c670000000000000000000000000000000000000000000000000000000033ce93fe000000000000000000000000000000000000000000000000000000003591c1a000000000000000000000000000000000000000000000000000000000396073820000000000000000000000000000000000000000000000000000000046657fe90000000000000000000000000000000000000000000000000000000052ef6b2c000000000000000000000000000000000000000000000000000000005518c73b0000000000000000000000000000000000000000000000000000000056142d7a00000000000000000000000000000000000000000000000000000000631f4bac000000000000000000000000000000000000000000000000000000006e9960c30000000000000000000000000000000000000000000000000000000074f4d30d0000000000000000000000000000000000000000000000000000000079823c9a000000000000000000000000000000000000000000000000000000007a0ed627000000000000000000000000000000000000000000000000000000007b30c8da0000000000000000000000000000000000000000000000000000000098acd7a6000000000000000000000000000000000000000000000000000000009cd939e4000000000000000000000000000000000000000000000000000000009d1b5a8100000000000000000000000000000000000000000000000000000000a1954fc500000000000000000000000000000000000000000000000000000000adfca15e00000000000000000000000000000000000000000000000000000000af6a2dcd00000000000000000000000000000000000000000000000000000000b22dd78e00000000000000000000000000000000000000000000000000000000b8c2f66f00000000000000000000000000000000000000000000000000000000bd7c541200000000000000000000000000000000000000000000000000000000c3bbd2d700000000000000000000000000000000000000000000000000000000cdffacc6000000000000000000000
00000000000000000000000000000000000d046815600000000000000000000000000000000000000000000000000000000d86970d800000000000000000000000000000000000000000000000000000000db1f0bf900000000000000000000000000000000000000000000000000000000e5355c7500000000000000000000000000000000000000000000000000000000e81e0ba100000000000000000000000000000000000000000000000000000000ea6c029c00000000000000000000000000000000000000000000000000000000ef3f0bae00000000000000000000000000000000000000000000000000000000f5c1182c00000000000000000000000000000000000000000000000000000000facd743b00000000000000000000000000000000000000000000000000000000fd791f3c000000000000000000000000000000000000000000000000000000000000000000000000000000006e2e077311fb218b80404298c9a227c42b792b610000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000008042901c70000000000000000000000000000000000000000000000000000000012f43dab00000000000000000000000000000000000000000000000000000000eb67241900000000000000000000000000000000000000000000000000000000263b7f8e000000000000000000000000000000000000000000000000000000006c0960f900000000000000000000000000000000000000000000000000000000b473318e00000000000000000000000000000000000000000000000000000000c924de3500000000000000000000000000000000000000000000000000000000e4948f4300000000000000000000000000000000000000000000000000000000000000000000000000000000a226529de193f2153b25d7d9ad774689b78c301e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000080f23da4300000000000000000000000000000000000000000000000000000000c3d93e7c000000000000000000000000000000000000000000000000000000006edd4f120000000000000000000000000000000000000
00000000000000000006f497ac600000000000000000000000000000000000000000000000000000000701f58c5000000000000000000000000000000000000000000000000000000007f61885c0000000000000000000000000000000000000000000000000000000097c09d3400000000000000000000000000000000000000000000000000000000c37533bb0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001c00000000000000000000000007bd175a72991f0a30ed333eaf1cb215c42e7492c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010008e742608b21bf7eb23c1a9d0602047e3618b464c9b59c0fba3b3d7ab66e01000563374c277a2c1e34659a2a1e87371bb6d852ce142022d497bfb50b9e3200000000000000000000000000000000000000000000000000000000044aa200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f4240000000000000000000000000000000000000000000000000000000000001d4c00000000000000000000000000000000000000000000000000000000004c4b40000000000000000000000000000000000000000000000000000000000000182b8000000000000000000000000000000000000000000000000000000000ee6b280000000000000000000000000baa109e4e90cc08a3c5069a0173aace4e934d57e" +bridgehub_create_new_chain_salt = 0 +base_token_addr = "0x0000000000000000000000000000000000000001" diff --git a/l1-contracts/deploy-script-config-template/register-hyperchain.toml b/l1-contracts/deploy-script-config-template/register-hyperchain.toml index dd93f34a4..bc27ace6d 100644 --- a/l1-contracts/deploy-script-config-template/register-hyperchain.toml +++ b/l1-contracts/deploy-script-config-template/register-hyperchain.toml @@ -10,3 +10,4 @@ base_token_gas_price_multiplier_nominator = 1 base_token_gas_price_multiplier_denominator = 1 governance_min_delay = 0 governance_security_council_address = "0x0000000000000000000000000000000000000000" +force_deployments_data = "0x" diff --git 
a/l1-contracts/deploy-scripts/AcceptAdmin.s.sol b/l1-contracts/deploy-scripts/AcceptAdmin.s.sol index be08c5295..690832a53 100644 --- a/l1-contracts/deploy-scripts/AcceptAdmin.s.sol +++ b/l1-contracts/deploy-scripts/AcceptAdmin.s.sol @@ -1,15 +1,38 @@ -// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.0; +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.21; import {Script} from "forge-std/Script.sol"; -import {Ownable2Step} from "@openzeppelin/contracts/access/Ownable2Step.sol"; -import {IZkSyncHyperchain} from "contracts/state-transition/chain-interfaces/IZkSyncHyperchain.sol"; +import {Ownable2Step} from "@openzeppelin/contracts-v4/access/Ownable2Step.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; import {IChainAdmin} from "contracts/governance/IChainAdmin.sol"; +import {Call} from "contracts/governance/Common.sol"; import {Utils} from "./Utils.sol"; +import {stdToml} from "forge-std/StdToml.sol"; + +bytes32 constant SET_TOKEN_MULTIPLIER_SETTER_ROLE = keccak256("SET_TOKEN_MULTIPLIER_SETTER_ROLE"); contract AcceptAdmin is Script { + using stdToml for string; + + struct Config { + address admin; + address governor; + } + + Config internal config; + + function initConfig() public { + string memory root = vm.projectRoot(); + string memory path = string.concat(root, "/script-config/config-accept-admin.toml"); + string memory toml = vm.readFile(path); + config.admin = toml.readAddress("$.target_addr"); + config.governor = toml.readAddress("$.governor"); + } + // This function should be called by the owner to accept the admin role function governanceAcceptOwner(address governor, address target) public { Ownable2Step adminContract = Ownable2Step(target); @@ -25,7 +48,7 @@ contract 
AcceptAdmin is Script { // This function should be called by the owner to accept the admin role function governanceAcceptAdmin(address governor, address target) public { - IZkSyncHyperchain adminContract = IZkSyncHyperchain(target); + IZKChain adminContract = IZKChain(target); Utils.executeUpgrade({ _governor: governor, _salt: bytes32(0), @@ -38,10 +61,10 @@ contract AcceptAdmin is Script { // This function should be called by the owner to accept the admin role function chainAdminAcceptAdmin(ChainAdmin chainAdmin, address target) public { - IZkSyncHyperchain adminContract = IZkSyncHyperchain(target); + IZKChain adminContract = IZKChain(target); - IChainAdmin.Call[] memory calls = new IChainAdmin.Call[](1); - calls[0] = IChainAdmin.Call({target: target, value: 0, data: abi.encodeCall(adminContract.acceptAdmin, ())}); + Call[] memory calls = new Call[](1); + calls[0] = Call({target: target, value: 0, data: abi.encodeCall(adminContract.acceptAdmin, ())}); vm.startBroadcast(); chainAdmin.multicall(calls, true); @@ -49,11 +72,50 @@ contract AcceptAdmin is Script { } // This function should be called by the owner to update token multiplier setter role - function chainSetTokenMultiplierSetter(address chainAdmin, address target) public { - IChainAdmin admin = IChainAdmin(chainAdmin); + function chainSetTokenMultiplierSetter( + address accessControlRestriction, + address diamondProxyAddress, + address setter + ) public { + AccessControlRestriction restriction = AccessControlRestriction(accessControlRestriction); + + if ( + restriction.requiredRoles(diamondProxyAddress, IAdmin.setTokenMultiplier.selector) != + SET_TOKEN_MULTIPLIER_SETTER_ROLE + ) { + vm.startBroadcast(); + restriction.setRequiredRoleForCall( + diamondProxyAddress, + IAdmin.setTokenMultiplier.selector, + SET_TOKEN_MULTIPLIER_SETTER_ROLE + ); + vm.stopBroadcast(); + } + + if (!restriction.hasRole(SET_TOKEN_MULTIPLIER_SETTER_ROLE, setter)) { + vm.startBroadcast(); + 
restriction.grantRole(SET_TOKEN_MULTIPLIER_SETTER_ROLE, setter); + vm.stopBroadcast(); + } + } + + function setDAValidatorPair( + ChainAdmin chainAdmin, + address target, + address l1DaValidator, + address l2DaValidator + ) public { + IZKChain adminContract = IZKChain(target); + + Call[] memory calls = new Call[](1); + calls[0] = Call({ + target: target, + value: 0, + data: abi.encodeCall(adminContract.setDAValidatorPair, (l1DaValidator, l2DaValidator)) + }); vm.startBroadcast(); - admin.setTokenMultiplierSetter(target); + chainAdmin.multicall(calls, true); vm.stopBroadcast(); } } diff --git a/l1-contracts/deploy-scripts/DecentralizeGovernanceUpgradeScript.s.sol b/l1-contracts/deploy-scripts/DecentralizeGovernanceUpgradeScript.s.sol index f0fc73617..39ecc4efd 100644 --- a/l1-contracts/deploy-scripts/DecentralizeGovernanceUpgradeScript.s.sol +++ b/l1-contracts/deploy-scripts/DecentralizeGovernanceUpgradeScript.s.sol @@ -1,26 +1,29 @@ // SPDX-License-Identifier: MIT +// solhint-disable reason-string, gas-custom-errors pragma solidity 0.8.24; import {Script} from "forge-std/Script.sol"; -import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol"; -import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; +import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; import {Governance} from "contracts/governance/Governance.sol"; -import {IStateTransitionManager} from "contracts/state-transition/IStateTransitionManager.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; import {Utils} from "./Utils.sol"; contract DecentralizeGovernanceUpgradeScript is Script { - function upgradeSTM( + function upgradeCTM( ProxyAdmin _proxyAdmin, - ITransparentUpgradeableProxy _stmProxy, + 
ITransparentUpgradeableProxy _ctmProxy, Governance _governance, - address _newStmImpl + address _newCtmImpl ) public { - require(_proxyAdmin.getProxyAdmin(_stmProxy) == address(_proxyAdmin)); - require(_proxyAdmin.owner() == address(_governance)); + // solhint-disable-next-line gas-custom-errors + require(_proxyAdmin.getProxyAdmin(_ctmProxy) == address(_proxyAdmin), "Proxy admin incorrect"); + // solhint-disable-next-line gas-custom-errors + require(_proxyAdmin.owner() == address(_governance), "Proxy admin owner incorrect"); - bytes memory proxyAdminUpgradeData = abi.encodeCall(ProxyAdmin.upgrade, (_stmProxy, _newStmImpl)); + bytes memory proxyAdminUpgradeData = abi.encodeCall(ProxyAdmin.upgrade, (_ctmProxy, _newCtmImpl)); Utils.executeUpgrade({ _governor: address(_governance), @@ -33,7 +36,7 @@ contract DecentralizeGovernanceUpgradeScript is Script { } function setPendingAdmin(address _target, Governance _governance, address _pendingAdmin) public { - bytes memory upgradeData = abi.encodeCall(IStateTransitionManager.setPendingAdmin, (_pendingAdmin)); + bytes memory upgradeData = abi.encodeCall(IChainTypeManager.setPendingAdmin, (_pendingAdmin)); Utils.executeUpgrade({ _governor: address(_governance), _salt: bytes32(0), diff --git a/l1-contracts/deploy-scripts/DeployErc20.s.sol b/l1-contracts/deploy-scripts/DeployErc20.s.sol index 69a8ab649..400c1ff1f 100644 --- a/l1-contracts/deploy-scripts/DeployErc20.s.sol +++ b/l1-contracts/deploy-scripts/DeployErc20.s.sol @@ -13,6 +13,7 @@ import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; import {WETH9} from "contracts/dev-contracts/WETH9.sol"; import {Utils} from "./Utils.sol"; +import {MintFailed} from "./ZkSyncScriptErrors.sol"; contract DeployErc20Script is Script { using stdToml for string; @@ -34,7 +35,7 @@ contract DeployErc20Script is Script { uint256 mint; } - Config config; + Config internal config; function run() public { console.log("Deploying ERC20 Tokens"); @@ -44,13 +45,22 @@ 
contract DeployErc20Script is Script { saveOutput(); } + function getTokensAddresses() public view returns (address[] memory) { + uint256 tokensLength = config.tokens.length; + address[] memory addresses = new address[](tokensLength); + for (uint256 i = 0; i < tokensLength; ++i) { + addresses[i] = config.tokens[i].addr; + } + return addresses; + } + function initializeConfig() internal { config.deployerAddress = msg.sender; string memory root = vm.projectRoot(); // Grab config from output of l1 deployment - string memory path = string.concat(root, "/script-out/output-deploy-l1.toml"); + string memory path = string.concat(root, vm.envString("L1_OUTPUT")); string memory toml = vm.readFile(path); // Config file must be parsed key by key, otherwise values returned @@ -60,13 +70,14 @@ contract DeployErc20Script is Script { config.create2FactorySalt = vm.parseTomlBytes32(toml, "$.create2_factory_salt"); // Grab config from custom config file - path = string.concat(root, "/script-config/config-deploy-erc20.toml"); + path = string.concat(root, vm.envString("TOKENS_CONFIG")); toml = vm.readFile(path); config.additionalAddressesForMinting = vm.parseTomlAddressArray(toml, "$.additional_addresses_for_minting"); string[] memory tokens = vm.parseTomlKeys(toml, "$.tokens"); - for (uint256 i = 0; i < tokens.length; i++) { + uint256 tokensLength = tokens.length; + for (uint256 i = 0; i < tokensLength; ++i) { TokenDescription memory token; string memory key = string.concat("$.tokens.", tokens[i]); token.name = toml.readString(string.concat(key, ".name")); @@ -79,7 +90,8 @@ contract DeployErc20Script is Script { } function deployTokens() internal { - for (uint256 i = 0; i < config.tokens.length; i++) { + uint256 tokensLength = config.tokens.length; + for (uint256 i = 0; i < tokensLength; ++i) { TokenDescription storage token = config.tokens[i]; console.log("Deploying token:", token.name); address tokenAddress = deployErc20({ @@ -116,12 +128,18 @@ contract DeployErc20Script is Script 
{ if (mint > 0) { vm.broadcast(); additionalAddressesForMinting.push(config.deployerAddress); - for (uint256 i = 0; i < additionalAddressesForMinting.length; i++) { + uint256 addressMintListLength = additionalAddressesForMinting.length; + for (uint256 i = 0; i < addressMintListLength; ++i) { (bool success, ) = tokenAddress.call( abi.encodeWithSignature("mint(address,uint256)", additionalAddressesForMinting[i], mint) ); - require(success, "Mint failed"); + if (!success) { + revert MintFailed(); + } console.log("Minting to:", additionalAddressesForMinting[i]); + if (!success) { + revert MintFailed(); + } } } @@ -130,7 +148,8 @@ contract DeployErc20Script is Script { function saveOutput() internal { string memory tokens = ""; - for (uint256 i = 0; i < config.tokens.length; i++) { + uint256 tokensLength = config.tokens.length; + for (uint256 i = 0; i < tokensLength; ++i) { TokenDescription memory token = config.tokens[i]; vm.serializeString(token.symbol, "name", token.name); vm.serializeString(token.symbol, "symbol", token.symbol); @@ -151,4 +170,7 @@ contract DeployErc20Script is Script { function deployViaCreate2(bytes memory _bytecode) internal returns (address) { return Utils.deployViaCreate2(_bytecode, config.create2FactorySalt, config.create2FactoryAddr); } + + // add this to be excluded from coverage report + function test() internal {} } diff --git a/l1-contracts/deploy-scripts/DeployL1.s.sol b/l1-contracts/deploy-scripts/DeployL1.s.sol index f4de831a8..8a10c0017 100644 --- a/l1-contracts/deploy-scripts/DeployL1.s.sol +++ b/l1-contracts/deploy-scripts/DeployL1.s.sol @@ -1,131 +1,93 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -// solhint-disable no-console +// solhint-disable no-console, gas-custom-errors import {Script, console2 as console} from "forge-std/Script.sol"; import {stdToml} from "forge-std/StdToml.sol"; -import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol"; -import {TransparentUpgradeableProxy} from 
"@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; - -import {Utils} from "./Utils.sol"; +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {StateTransitionDeployedAddresses, Utils, L2_BRIDGEHUB_ADDRESS, L2_ASSET_ROUTER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS, L2_MESSAGE_ROOT_ADDRESS} from "./Utils.sol"; import {Multicall3} from "contracts/dev-contracts/Multicall3.sol"; import {Verifier} from "contracts/state-transition/Verifier.sol"; import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; import {VerifierParams, IVerifier} from "contracts/state-transition/chain-interfaces/IVerifier.sol"; import {DefaultUpgrade} from "contracts/upgrades/DefaultUpgrade.sol"; import {Governance} from "contracts/governance/Governance.sol"; +import {L1GenesisUpgrade} from "contracts/upgrades/L1GenesisUpgrade.sol"; import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; -import {GenesisUpgrade} from "contracts/upgrades/GenesisUpgrade.sol"; import {ValidatorTimelock} from "contracts/state-transition/ValidatorTimelock.sol"; import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {CTMDeploymentTracker} from "contracts/bridgehub/CTMDeploymentTracker.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; import {DiamondInit} 
from "contracts/state-transition/chain-deps/DiamondInit.sol"; -import {StateTransitionManager} from "contracts/state-transition/StateTransitionManager.sol"; -import {StateTransitionManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IStateTransitionManager.sol"; -import {IStateTransitionManager} from "contracts/state-transition/IStateTransitionManager.sol"; +import {ChainTypeManager} from "contracts/state-transition/ChainTypeManager.sol"; +import {ChainTypeManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IChainTypeManager.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {InitializeDataNewChain as DiamondInitializeDataNewChain} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; -import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; -import {L1SharedBridge} from "contracts/bridge/L1SharedBridge.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; import {L1ERC20Bridge} from "contracts/bridge/L1ERC20Bridge.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; - -contract DeployL1Script is Script { +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {AddressHasNoCode} from "./ZkSyncScriptErrors.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {IL2ContractDeployer} from 
"contracts/common/interfaces/IL2ContractDeployer.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {IL1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {L1NullifierDev} from "contracts/dev-contracts/L1NullifierDev.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L2ContractsBytecodesLib} from "./L2ContractsBytecodesLib.sol"; +import {ValidiumL1DAValidator} from "contracts/state-transition/data-availability/ValidiumL1DAValidator.sol"; + +import {DeployUtils, GeneratedData, Config, DeployedAddresses, FixedForceDeploymentsData} from "./DeployUtils.s.sol"; + +contract DeployL1Script is Script, DeployUtils { using stdToml for string; - address constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; - address constant DETERMINISTIC_CREATE2_ADDRESS = 0x4e59b44847b379578588920cA78FbF26c0B4956C; - - struct DeployedAddresses { - BridgehubDeployedAddresses bridgehub; - StateTransitionDeployedAddresses stateTransition; - BridgesDeployedAddresses bridges; - address transparentProxyAdmin; - address governance; - address chainAdmin; - address blobVersionedHashRetriever; - address validatorTimelock; - address create2Factory; - } - - struct BridgehubDeployedAddresses { - address bridgehubImplementation; - address bridgehubProxy; - } - - struct StateTransitionDeployedAddresses { - address stateTransitionProxy; - address stateTransitionImplementation; - address verifier; - address adminFacet; - address mailboxFacet; - address executorFacet; - address gettersFacet; - 
address diamondInit; - address genesisUpgrade; - address defaultUpgrade; - address diamondProxy; - } - - struct BridgesDeployedAddresses { - address erc20BridgeImplementation; - address erc20BridgeProxy; - address sharedBridgeImplementation; - address sharedBridgeProxy; - } - - struct Config { - uint256 l1ChainId; - uint256 eraChainId; - address deployerAddress; - address ownerAddress; - bool testnetVerifier; - ContractsConfig contracts; - TokensConfig tokens; - } - - struct ContractsConfig { - bytes32 create2FactorySalt; - address create2FactoryAddr; - address multicall3Addr; - uint256 validatorTimelockExecutionDelay; - bytes32 genesisRoot; - uint256 genesisRollupLeafIndex; - bytes32 genesisBatchCommitment; - uint256 latestProtocolVersion; - bytes32 recursionNodeLevelVkHash; - bytes32 recursionLeafLevelVkHash; - bytes32 recursionCircuitsSetVksHash; - uint256 priorityTxMaxGasLimit; - PubdataPricingMode diamondInitPubdataPricingMode; - uint256 diamondInitBatchOverheadL1Gas; - uint256 diamondInitMaxPubdataPerBatch; - uint256 diamondInitMaxL2GasPerBatch; - uint256 diamondInitPriorityTxMaxPubdata; - uint256 diamondInitMinimalL2GasPrice; - address governanceSecurityCouncilAddress; - uint256 governanceMinDelay; - uint256 maxNumberOfChains; - bytes diamondCutData; - bytes32 bootloaderHash; - bytes32 defaultAAHash; - } - - struct TokensConfig { - address tokenWethAddress; - } - - Config config; - DeployedAddresses addresses; + address internal constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; function run() public { console.log("Deploying L1 contracts"); - initializeConfig(); + runInner("/script-config/config-deploy-l1.toml", "/script-out/output-deploy-l1.toml"); + } + + function runForTest() public { + runInner(vm.envString("L1_CONFIG"), vm.envString("L1_OUTPUT")); + } + + function getAddresses() public view returns (DeployedAddresses memory) { + return addresses; + } + + function getConfig() public view returns (Config memory) { + return config; + } + + 
function runInner(string memory inputPath, string memory outputPath) internal { + string memory root = vm.projectRoot(); + inputPath = string.concat(root, inputPath); + outputPath = string.concat(root, outputPath); + + saveDiamondSelectors(); + initializeConfig(inputPath); instantiateCreate2Factory(); deployIfNeededMulticall3(); @@ -134,356 +96,162 @@ contract DeployL1Script is Script { deployDefaultUpgrade(); deployGenesisUpgrade(); + deployDAValidators(); deployValidatorTimelock(); deployGovernance(); deployChainAdmin(); deployTransparentProxyAdmin(); deployBridgehubContract(); - deployBlobVersionedHashRetriever(); - deployStateTransitionManagerContract(); - setStateTransitionManagerInValidatorTimelock(); - - deployDiamondProxy(); + deployMessageRootContract(); + deployL1NullifierContracts(); deploySharedBridgeContracts(); + deployBridgedStandardERC20Implementation(); + deployBridgedTokenBeacon(); + deployL1NativeTokenVaultImplementation(); + deployL1NativeTokenVaultProxy(); deployErc20BridgeImplementation(); deployErc20BridgeProxy(); updateSharedBridge(); + deployCTMDeploymentTracker(); + setBridgehubParams(); + + initializeGeneratedData(); + + deployBlobVersionedHashRetriever(); + deployChainTypeManagerContract(); + registerChainTypeManager(); + setChainTypeManagerInValidatorTimelock(); updateOwners(); - saveOutput(); + saveOutput(outputPath); } - function initializeConfig() internal { - string memory root = vm.projectRoot(); - string memory path = string.concat(root, "/script-config/config-deploy-l1.toml"); - string memory toml = vm.readFile(path); - - config.l1ChainId = block.chainid; - config.deployerAddress = msg.sender; - - // Config file must be parsed key by key, otherwise values returned - // are parsed alfabetically and not by key. 
- // https://book.getfoundry.sh/cheatcodes/parse-toml - config.eraChainId = toml.readUint("$.era_chain_id"); - config.ownerAddress = toml.readAddress("$.owner_address"); - config.testnetVerifier = toml.readBool("$.testnet_verifier"); - - config.contracts.governanceSecurityCouncilAddress = toml.readAddress( - "$.contracts.governance_security_council_address" - ); - config.contracts.governanceMinDelay = toml.readUint("$.contracts.governance_min_delay"); - config.contracts.maxNumberOfChains = toml.readUint("$.contracts.max_number_of_chains"); - config.contracts.create2FactorySalt = toml.readBytes32("$.contracts.create2_factory_salt"); - if (vm.keyExistsToml(toml, "$.contracts.create2_factory_addr")) { - config.contracts.create2FactoryAddr = toml.readAddress("$.contracts.create2_factory_addr"); - } - config.contracts.validatorTimelockExecutionDelay = toml.readUint( - "$.contracts.validator_timelock_execution_delay" - ); - config.contracts.genesisRoot = toml.readBytes32("$.contracts.genesis_root"); - config.contracts.genesisRollupLeafIndex = toml.readUint("$.contracts.genesis_rollup_leaf_index"); - config.contracts.genesisBatchCommitment = toml.readBytes32("$.contracts.genesis_batch_commitment"); - config.contracts.latestProtocolVersion = toml.readUint("$.contracts.latest_protocol_version"); - config.contracts.recursionNodeLevelVkHash = toml.readBytes32("$.contracts.recursion_node_level_vk_hash"); - config.contracts.recursionLeafLevelVkHash = toml.readBytes32("$.contracts.recursion_leaf_level_vk_hash"); - config.contracts.recursionCircuitsSetVksHash = toml.readBytes32("$.contracts.recursion_circuits_set_vks_hash"); - config.contracts.priorityTxMaxGasLimit = toml.readUint("$.contracts.priority_tx_max_gas_limit"); - config.contracts.diamondInitPubdataPricingMode = PubdataPricingMode( - toml.readUint("$.contracts.diamond_init_pubdata_pricing_mode") - ); - config.contracts.diamondInitBatchOverheadL1Gas = toml.readUint( - "$.contracts.diamond_init_batch_overhead_l1_gas" - ); 
- config.contracts.diamondInitMaxPubdataPerBatch = toml.readUint( - "$.contracts.diamond_init_max_pubdata_per_batch" - ); - config.contracts.diamondInitMaxL2GasPerBatch = toml.readUint("$.contracts.diamond_init_max_l2_gas_per_batch"); - config.contracts.diamondInitPriorityTxMaxPubdata = toml.readUint( - "$.contracts.diamond_init_priority_tx_max_pubdata" - ); - config.contracts.diamondInitMinimalL2GasPrice = toml.readUint("$.contracts.diamond_init_minimal_l2_gas_price"); - config.contracts.defaultAAHash = toml.readBytes32("$.contracts.default_aa_hash"); - config.contracts.bootloaderHash = toml.readBytes32("$.contracts.bootloader_hash"); - - config.tokens.tokenWethAddress = toml.readAddress("$.tokens.token_weth_address"); - } - - function instantiateCreate2Factory() internal { - address contractAddress; - - bool isDeterministicDeployed = DETERMINISTIC_CREATE2_ADDRESS.code.length > 0; - bool isConfigured = config.contracts.create2FactoryAddr != address(0); - - if (isConfigured) { - if (config.contracts.create2FactoryAddr.code.length == 0) { - revert("Create2Factory configured address is empty"); - } - contractAddress = config.contracts.create2FactoryAddr; - console.log("Using configured Create2Factory address:", contractAddress); - } else if (isDeterministicDeployed) { - contractAddress = DETERMINISTIC_CREATE2_ADDRESS; - console.log("Using deterministic Create2Factory address:", contractAddress); - } else { - contractAddress = Utils.deployCreate2Factory(); - console.log("Create2Factory deployed at:", contractAddress); - } - - addresses.create2Factory = contractAddress; + function initializeGeneratedData() internal { + generatedData.forceDeploymentsData = prepareForceDeploymentsData(); } function deployIfNeededMulticall3() internal { // Multicall3 is already deployed on public networks if (MULTICALL3_ADDRESS.code.length == 0) { - address contractAddress = deployViaCreate2(type(Multicall3).creationCode); + address contractAddress = 
deployViaCreate2(type(Multicall3).creationCode, ""); console.log("Multicall3 deployed at:", contractAddress); config.contracts.multicall3Addr = contractAddress; } else { config.contracts.multicall3Addr = MULTICALL3_ADDRESS; } } + function deployDAValidators() internal { + address contractAddress = deployViaCreate2(Utils.readRollupDAValidatorBytecode(), ""); + console.log("L1RollupDAValidator deployed at:", contractAddress); + addresses.daAddresses.l1RollupDAValidator = contractAddress; - function deployVerifier() internal { - bytes memory code; - if (config.testnetVerifier) { - code = type(TestnetVerifier).creationCode; - } else { - code = type(Verifier).creationCode; - } - address contractAddress = deployViaCreate2(code); - console.log("Verifier deployed at:", contractAddress); - addresses.stateTransition.verifier = contractAddress; - } - - function deployDefaultUpgrade() internal { - address contractAddress = deployViaCreate2(type(DefaultUpgrade).creationCode); - console.log("DefaultUpgrade deployed at:", contractAddress); - addresses.stateTransition.defaultUpgrade = contractAddress; + contractAddress = deployViaCreate2(type(ValidiumL1DAValidator).creationCode, ""); + console.log("L1ValidiumDAValidator deployed at:", contractAddress); + addresses.daAddresses.l1ValidiumDAValidator = contractAddress; } - - function deployGenesisUpgrade() internal { - address contractAddress = deployViaCreate2(type(GenesisUpgrade).creationCode); - console.log("GenesisUpgrade deployed at:", contractAddress); - addresses.stateTransition.genesisUpgrade = contractAddress; - } - - function deployValidatorTimelock() internal { - uint32 executionDelay = uint32(config.contracts.validatorTimelockExecutionDelay); - bytes memory bytecode = abi.encodePacked( - type(ValidatorTimelock).creationCode, - abi.encode(config.deployerAddress, executionDelay, config.eraChainId) + function deployBridgehubContract() internal { + address bridgehubImplementation = deployViaCreate2( + 
type(Bridgehub).creationCode, + abi.encode(config.l1ChainId, config.ownerAddress, (config.contracts.maxNumberOfChains)) ); - address contractAddress = deployViaCreate2(bytecode); - console.log("ValidatorTimelock deployed at:", contractAddress); - addresses.validatorTimelock = contractAddress; - } + console.log("Bridgehub Implementation deployed at:", bridgehubImplementation); + addresses.bridgehub.bridgehubImplementation = bridgehubImplementation; - function deployGovernance() internal { - bytes memory bytecode = abi.encodePacked( - type(Governance).creationCode, + address bridgehubProxy = deployViaCreate2( + type(TransparentUpgradeableProxy).creationCode, abi.encode( - config.ownerAddress, - config.contracts.governanceSecurityCouncilAddress, - config.contracts.governanceMinDelay + bridgehubImplementation, + addresses.transparentProxyAdmin, + abi.encodeCall(Bridgehub.initialize, (config.deployerAddress)) ) ); - address contractAddress = deployViaCreate2(bytecode); - console.log("Governance deployed at:", contractAddress); - addresses.governance = contractAddress; + console.log("Bridgehub Proxy deployed at:", bridgehubProxy); + addresses.bridgehub.bridgehubProxy = bridgehubProxy; } - function deployChainAdmin() internal { - bytes memory bytecode = abi.encodePacked( - type(ChainAdmin).creationCode, - abi.encode(config.ownerAddress, address(0)) + function deployMessageRootContract() internal { + address messageRootImplementation = deployViaCreate2( + type(MessageRoot).creationCode, + abi.encode(addresses.bridgehub.bridgehubProxy) ); - address contractAddress = deployViaCreate2(bytecode); - console.log("ChainAdmin deployed at:", contractAddress); - addresses.chainAdmin = contractAddress; - } + console.log("MessageRoot Implementation deployed at:", messageRootImplementation); + addresses.bridgehub.messageRootImplementation = messageRootImplementation; - function deployTransparentProxyAdmin() internal { - vm.startBroadcast(); - ProxyAdmin proxyAdmin = new ProxyAdmin(); - 
proxyAdmin.transferOwnership(addresses.governance); - vm.stopBroadcast(); - console.log("Transparent Proxy Admin deployed at:", address(proxyAdmin)); - addresses.transparentProxyAdmin = address(proxyAdmin); + address messageRootProxy = deployViaCreate2( + type(TransparentUpgradeableProxy).creationCode, + abi.encode( + messageRootImplementation, + addresses.transparentProxyAdmin, + abi.encodeCall(MessageRoot.initialize, ()) + ) + ); + console.log("Message Root Proxy deployed at:", messageRootProxy); + addresses.bridgehub.messageRootProxy = messageRootProxy; } - function deployBridgehubContract() internal { - address bridgehubImplementation = deployViaCreate2(type(Bridgehub).creationCode); - console.log("Bridgehub Implementation deployed at:", bridgehubImplementation); - addresses.bridgehub.bridgehubImplementation = bridgehubImplementation; + function deployCTMDeploymentTracker() internal { + address ctmDTImplementation = deployViaCreate2( + type(CTMDeploymentTracker).creationCode, + abi.encode(addresses.bridgehub.bridgehubProxy, addresses.bridges.sharedBridgeProxy) + ); + console.log("CTM Deployment Tracker Implementation deployed at:", ctmDTImplementation); + addresses.bridgehub.ctmDeploymentTrackerImplementation = ctmDTImplementation; - bytes memory bytecode = abi.encodePacked( + address ctmDTProxy = deployViaCreate2( type(TransparentUpgradeableProxy).creationCode, abi.encode( - bridgehubImplementation, + ctmDTImplementation, addresses.transparentProxyAdmin, - abi.encodeCall(Bridgehub.initialize, (config.deployerAddress)) + abi.encodeCall(CTMDeploymentTracker.initialize, (config.deployerAddress)) ) ); - address bridgehubProxy = deployViaCreate2(bytecode); - console.log("Bridgehub Proxy deployed at:", bridgehubProxy); - addresses.bridgehub.bridgehubProxy = bridgehubProxy; + console.log("CTM Deployment Tracker Proxy deployed at:", ctmDTProxy); + addresses.bridgehub.ctmDeploymentTrackerProxy = ctmDTProxy; } function deployBlobVersionedHashRetriever() internal { // 
solc contracts/state-transition/utils/blobVersionedHashRetriever.yul --strict-assembly --bin bytes memory bytecode = hex"600b600b5f39600b5ff3fe5f358049805f5260205ff3"; - address contractAddress = deployViaCreate2(bytecode); + address contractAddress = deployViaCreate2(bytecode, ""); console.log("BlobVersionedHashRetriever deployed at:", contractAddress); addresses.blobVersionedHashRetriever = contractAddress; } + function registerChainTypeManager() internal { + Bridgehub bridgehub = Bridgehub(addresses.bridgehub.bridgehubProxy); + vm.startBroadcast(msg.sender); + bridgehub.addChainTypeManager(addresses.stateTransition.chainTypeManagerProxy); + console.log("ChainTypeManager registered"); + CTMDeploymentTracker ctmDT = CTMDeploymentTracker(addresses.bridgehub.ctmDeploymentTrackerProxy); + // vm.startBroadcast(msg.sender); + L1AssetRouter sharedBridge = L1AssetRouter(addresses.bridges.sharedBridgeProxy); + sharedBridge.setAssetDeploymentTracker( + bytes32(uint256(uint160(addresses.stateTransition.chainTypeManagerProxy))), + address(ctmDT) + ); + console.log("CTM DT whitelisted"); + + ctmDT.registerCTMAssetOnL1(addresses.stateTransition.chainTypeManagerProxy); + vm.stopBroadcast(); + console.log("CTM registered in CTMDeploymentTracker"); - function deployStateTransitionManagerContract() internal { - deployStateTransitionDiamondFacets(); - deployStateTransitionManagerImplementation(); - deployStateTransitionManagerProxy(); - registerStateTransitionManager(); - } - - function deployStateTransitionDiamondFacets() internal { - address executorFacet = deployViaCreate2(type(ExecutorFacet).creationCode); - console.log("ExecutorFacet deployed at:", executorFacet); - addresses.stateTransition.executorFacet = executorFacet; - - address adminFacet = deployViaCreate2(type(AdminFacet).creationCode); - console.log("AdminFacet deployed at:", adminFacet); - addresses.stateTransition.adminFacet = adminFacet; - - address mailboxFacet = deployViaCreate2( - 
abi.encodePacked(type(MailboxFacet).creationCode, abi.encode(config.eraChainId)) - ); - console.log("MailboxFacet deployed at:", mailboxFacet); - addresses.stateTransition.mailboxFacet = mailboxFacet; - - address gettersFacet = deployViaCreate2(type(GettersFacet).creationCode); - console.log("GettersFacet deployed at:", gettersFacet); - addresses.stateTransition.gettersFacet = gettersFacet; - - address diamondInit = deployViaCreate2(type(DiamondInit).creationCode); - console.log("DiamondInit deployed at:", diamondInit); - addresses.stateTransition.diamondInit = diamondInit; - } - - function deployStateTransitionManagerImplementation() internal { - bytes memory bytecode = abi.encodePacked( - type(StateTransitionManager).creationCode, - abi.encode(addresses.bridgehub.bridgehubProxy), - abi.encode(config.contracts.maxNumberOfChains) - ); - address contractAddress = deployViaCreate2(bytecode); - console.log("StateTransitionManagerImplementation deployed at:", contractAddress); - addresses.stateTransition.stateTransitionImplementation = contractAddress; - } - - function deployStateTransitionManagerProxy() internal { - Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](4); - facetCuts[0] = Diamond.FacetCut({ - facet: addresses.stateTransition.adminFacet, - action: Diamond.Action.Add, - isFreezable: false, - selectors: Utils.getAllSelectors(addresses.stateTransition.adminFacet.code) - }); - facetCuts[1] = Diamond.FacetCut({ - facet: addresses.stateTransition.gettersFacet, - action: Diamond.Action.Add, - isFreezable: false, - selectors: Utils.getAllSelectors(addresses.stateTransition.gettersFacet.code) - }); - facetCuts[2] = Diamond.FacetCut({ - facet: addresses.stateTransition.mailboxFacet, - action: Diamond.Action.Add, - isFreezable: true, - selectors: Utils.getAllSelectors(addresses.stateTransition.mailboxFacet.code) - }); - facetCuts[3] = Diamond.FacetCut({ - facet: addresses.stateTransition.executorFacet, - action: Diamond.Action.Add, - isFreezable: true, - 
selectors: Utils.getAllSelectors(addresses.stateTransition.executorFacet.code) - }); - - VerifierParams memory verifierParams = VerifierParams({ - recursionNodeLevelVkHash: config.contracts.recursionNodeLevelVkHash, - recursionLeafLevelVkHash: config.contracts.recursionLeafLevelVkHash, - recursionCircuitsSetVksHash: config.contracts.recursionCircuitsSetVksHash - }); - - FeeParams memory feeParams = FeeParams({ - pubdataPricingMode: config.contracts.diamondInitPubdataPricingMode, - batchOverheadL1Gas: uint32(config.contracts.diamondInitBatchOverheadL1Gas), - maxPubdataPerBatch: uint32(config.contracts.diamondInitMaxPubdataPerBatch), - maxL2GasPerBatch: uint32(config.contracts.diamondInitMaxL2GasPerBatch), - priorityTxMaxPubdata: uint32(config.contracts.diamondInitPriorityTxMaxPubdata), - minimalL2GasPrice: uint64(config.contracts.diamondInitMinimalL2GasPrice) - }); - - DiamondInitializeDataNewChain memory initializeData = DiamondInitializeDataNewChain({ - verifier: IVerifier(addresses.stateTransition.verifier), - verifierParams: verifierParams, - l2BootloaderBytecodeHash: config.contracts.bootloaderHash, - l2DefaultAccountBytecodeHash: config.contracts.defaultAAHash, - priorityTxMaxGasLimit: config.contracts.priorityTxMaxGasLimit, - feeParams: feeParams, - blobVersionedHashRetriever: addresses.blobVersionedHashRetriever - }); - - Diamond.DiamondCutData memory diamondCut = Diamond.DiamondCutData({ - facetCuts: facetCuts, - initAddress: addresses.stateTransition.diamondInit, - initCalldata: abi.encode(initializeData) - }); - - config.contracts.diamondCutData = abi.encode(diamondCut); - - ChainCreationParams memory chainCreationParams = ChainCreationParams({ - genesisUpgrade: addresses.stateTransition.genesisUpgrade, - genesisBatchHash: config.contracts.genesisRoot, - genesisIndexRepeatedStorageChanges: uint64(config.contracts.genesisRollupLeafIndex), - genesisBatchCommitment: config.contracts.genesisBatchCommitment, - diamondCut: diamondCut - }); - - 
StateTransitionManagerInitializeData memory diamondInitData = StateTransitionManagerInitializeData({ - owner: msg.sender, - validatorTimelock: addresses.validatorTimelock, - chainCreationParams: chainCreationParams, - protocolVersion: config.contracts.latestProtocolVersion - }); - - address contractAddress = deployViaCreate2( - abi.encodePacked( - type(TransparentUpgradeableProxy).creationCode, - abi.encode( - addresses.stateTransition.stateTransitionImplementation, - addresses.transparentProxyAdmin, - abi.encodeCall(StateTransitionManager.initialize, (diamondInitData)) - ) - ) + bytes32 assetId = bridgehub.ctmAssetIdFromAddress(addresses.stateTransition.chainTypeManagerProxy); + // console.log(address(bridgehub.ctmDeployer()), addresses.bridgehub.ctmDeploymentTrackerProxy); + // console.log(address(bridgehub.ctmDeployer().BRIDGE_HUB()), addresses.bridgehub.bridgehubProxy); + console.log( + "CTM in router 1", + sharedBridge.assetHandlerAddress(assetId), + bridgehub.ctmAssetIdToAddress(assetId) ); - console.log("StateTransitionManagerProxy deployed at:", contractAddress); - addresses.stateTransition.stateTransitionProxy = contractAddress; } - function registerStateTransitionManager() internal { - Bridgehub bridgehub = Bridgehub(addresses.bridgehub.bridgehubProxy); - vm.broadcast(); - bridgehub.addStateTransitionManager(addresses.stateTransition.stateTransitionProxy); - console.log("StateTransitionManager registered"); - } - - function setStateTransitionManagerInValidatorTimelock() internal { + function setChainTypeManagerInValidatorTimelock() internal { ValidatorTimelock validatorTimelock = ValidatorTimelock(addresses.validatorTimelock); - vm.broadcast(); - validatorTimelock.setStateTransitionManager( - IStateTransitionManager(addresses.stateTransition.stateTransitionProxy) - ); - console.log("StateTransitionManager set in ValidatorTimelock"); + vm.broadcast(msg.sender); + 
validatorTimelock.setChainTypeManager(IChainTypeManager(addresses.stateTransition.chainTypeManagerProxy)); + console.log("ChainTypeManager set in ValidatorTimelock"); } function deployDiamondProxy() internal { @@ -499,11 +267,10 @@ contract DeployL1Script is Script { initAddress: address(0), initCalldata: "" }); - bytes memory bytecode = abi.encodePacked( + address contractAddress = deployViaCreate2( type(DiamondProxy).creationCode, abi.encode(config.l1ChainId, diamondCut) ); - address contractAddress = deployViaCreate2(bytecode); console.log("DiamondProxy deployed at:", contractAddress); addresses.stateTransition.diamondProxy = contractAddress; } @@ -511,75 +278,180 @@ contract DeployL1Script is Script { function deploySharedBridgeContracts() internal { deploySharedBridgeImplementation(); deploySharedBridgeProxy(); - registerSharedBridge(); + } + + function deployL1NullifierContracts() internal { + deployL1NullifierImplementation(); + deployL1NullifierProxy(); + } + + function deployL1NullifierImplementation() internal { + // TODO(EVM-743): allow non-dev nullifier in the local deployment + address contractAddress = deployViaCreate2( + type(L1NullifierDev).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode(addresses.bridgehub.bridgehubProxy, config.eraChainId, addresses.stateTransition.diamondProxy) + ); + console.log("L1NullifierImplementation deployed at:", contractAddress); + addresses.bridges.l1NullifierImplementation = contractAddress; + } + + function deployL1NullifierProxy() internal { + bytes memory initCalldata = abi.encodeCall(L1Nullifier.initialize, (config.deployerAddress, 1, 1, 1, 0)); + address contractAddress = deployViaCreate2( + type(TransparentUpgradeableProxy).creationCode, + abi.encode(addresses.bridges.l1NullifierImplementation, addresses.transparentProxyAdmin, initCalldata) + ); + console.log("L1NullifierProxy deployed at:", contractAddress); + addresses.bridges.l1NullifierProxy = contractAddress; } function 
deploySharedBridgeImplementation() internal { - bytes memory bytecode = abi.encodePacked( - type(L1SharedBridge).creationCode, + address contractAddress = deployViaCreate2( + type(L1AssetRouter).creationCode, // solhint-disable-next-line func-named-parameters abi.encode( config.tokens.tokenWethAddress, addresses.bridgehub.bridgehubProxy, + addresses.bridges.l1NullifierProxy, config.eraChainId, addresses.stateTransition.diamondProxy ) ); - address contractAddress = deployViaCreate2(bytecode); console.log("SharedBridgeImplementation deployed at:", contractAddress); addresses.bridges.sharedBridgeImplementation = contractAddress; } function deploySharedBridgeProxy() internal { - bytes memory initCalldata = abi.encodeCall(L1SharedBridge.initialize, (config.deployerAddress)); - bytes memory bytecode = abi.encodePacked( + bytes memory initCalldata = abi.encodeCall(L1AssetRouter.initialize, (config.deployerAddress)); + address contractAddress = deployViaCreate2( type(TransparentUpgradeableProxy).creationCode, abi.encode(addresses.bridges.sharedBridgeImplementation, addresses.transparentProxyAdmin, initCalldata) ); - address contractAddress = deployViaCreate2(bytecode); console.log("SharedBridgeProxy deployed at:", contractAddress); addresses.bridges.sharedBridgeProxy = contractAddress; } - function registerSharedBridge() internal { + function setBridgehubParams() internal { Bridgehub bridgehub = Bridgehub(addresses.bridgehub.bridgehubProxy); - vm.startBroadcast(); - bridgehub.addToken(ADDRESS_ONE); - bridgehub.setSharedBridge(addresses.bridges.sharedBridgeProxy); + vm.startBroadcast(msg.sender); + bridgehub.addTokenAssetId(bridgehub.baseTokenAssetId(config.eraChainId)); + // bridgehub.setSharedBridge(addresses.bridges.sharedBridgeProxy); + bridgehub.setAddresses( + addresses.bridges.sharedBridgeProxy, + ICTMDeploymentTracker(addresses.bridgehub.ctmDeploymentTrackerProxy), + IMessageRoot(addresses.bridgehub.messageRootProxy) + ); vm.stopBroadcast(); 
console.log("SharedBridge registered"); } function deployErc20BridgeImplementation() internal { - bytes memory bytecode = abi.encodePacked( + address contractAddress = deployViaCreate2( type(L1ERC20Bridge).creationCode, - abi.encode(addresses.bridges.sharedBridgeProxy) + abi.encode( + addresses.bridges.l1NullifierProxy, + addresses.bridges.sharedBridgeProxy, + addresses.vaults.l1NativeTokenVaultProxy, + config.eraChainId + ) ); - address contractAddress = deployViaCreate2(bytecode); console.log("Erc20BridgeImplementation deployed at:", contractAddress); addresses.bridges.erc20BridgeImplementation = contractAddress; } function deployErc20BridgeProxy() internal { bytes memory initCalldata = abi.encodeCall(L1ERC20Bridge.initialize, ()); - bytes memory bytecode = abi.encodePacked( + address contractAddress = deployViaCreate2( type(TransparentUpgradeableProxy).creationCode, abi.encode(addresses.bridges.erc20BridgeImplementation, addresses.transparentProxyAdmin, initCalldata) ); - address contractAddress = deployViaCreate2(bytecode); console.log("Erc20BridgeProxy deployed at:", contractAddress); addresses.bridges.erc20BridgeProxy = contractAddress; } function updateSharedBridge() internal { - L1SharedBridge sharedBridge = L1SharedBridge(addresses.bridges.sharedBridgeProxy); - vm.broadcast(); - sharedBridge.setL1Erc20Bridge(addresses.bridges.erc20BridgeProxy); + L1AssetRouter sharedBridge = L1AssetRouter(addresses.bridges.sharedBridgeProxy); + vm.broadcast(msg.sender); + sharedBridge.setL1Erc20Bridge(L1ERC20Bridge(addresses.bridges.erc20BridgeProxy)); console.log("SharedBridge updated with ERC20Bridge address"); } + function deployBridgedStandardERC20Implementation() internal { + address contractAddress = deployViaCreate2( + type(BridgedStandardERC20).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode() + ); + console.log("BridgedStandardERC20Implementation deployed at:", contractAddress); + addresses.bridges.bridgedStandardERC20Implementation 
= contractAddress; + } + + function deployBridgedTokenBeacon() internal { + /// Note we cannot use create2 as the deployer is the owner. + vm.broadcast(); + UpgradeableBeacon beacon = new UpgradeableBeacon(addresses.bridges.bridgedStandardERC20Implementation); + address contractAddress = address(beacon); + vm.broadcast(); + beacon.transferOwnership(config.ownerAddress); + console.log("BridgedTokenBeacon deployed at:", contractAddress); + addresses.bridges.bridgedTokenBeacon = contractAddress; + } + + function deployL1NativeTokenVaultImplementation() internal { + address contractAddress = deployViaCreate2( + type(L1NativeTokenVault).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode( + config.tokens.tokenWethAddress, + addresses.bridges.sharedBridgeProxy, + config.eraChainId, + addresses.bridges.l1NullifierProxy + ) + ); + console.log("L1NativeTokenVaultImplementation deployed at:", contractAddress); + addresses.vaults.l1NativeTokenVaultImplementation = contractAddress; + } + + function deployL1NativeTokenVaultProxy() internal { + bytes memory initCalldata = abi.encodeCall( + L1NativeTokenVault.initialize, + (config.ownerAddress, addresses.bridges.bridgedTokenBeacon) + ); + address contractAddress = deployViaCreate2( + type(TransparentUpgradeableProxy).creationCode, + abi.encode(addresses.vaults.l1NativeTokenVaultImplementation, addresses.transparentProxyAdmin, initCalldata) + ); + console.log("L1NativeTokenVaultProxy deployed at:", contractAddress); + addresses.vaults.l1NativeTokenVaultProxy = contractAddress; + + IL1AssetRouter sharedBridge = IL1AssetRouter(addresses.bridges.sharedBridgeProxy); + IL1Nullifier l1Nullifier = IL1Nullifier(addresses.bridges.l1NullifierProxy); + // Ownable ownable = Ownable(addresses.bridges.sharedBridgeProxy); + + vm.broadcast(msg.sender); + sharedBridge.setNativeTokenVault(INativeTokenVault(addresses.vaults.l1NativeTokenVaultProxy)); + vm.broadcast(msg.sender); + 
l1Nullifier.setL1NativeTokenVault(IL1NativeTokenVault(addresses.vaults.l1NativeTokenVaultProxy)); + vm.broadcast(msg.sender); + l1Nullifier.setL1AssetRouter(addresses.bridges.sharedBridgeProxy); + + vm.broadcast(msg.sender); + IL1NativeTokenVault(addresses.vaults.l1NativeTokenVaultProxy).registerEthToken(); + + // bytes memory data = abi.encodeCall(sharedBridge.setNativeTokenVault, (IL1NativeTokenVault(addresses.vaults.l1NativeTokenVaultProxy))); + // Utils.executeUpgrade({ + // _governor: ownable.owner(), + // _salt: bytes32(0), + // _target: addresses.bridges.sharedBridgeProxy, + // _data: data, + // _value: 0, + // _delay: 0 + // }); + } + function updateOwners() internal { - vm.startBroadcast(); + vm.startBroadcast(msg.sender); ValidatorTimelock validatorTimelock = ValidatorTimelock(addresses.validatorTimelock); validatorTimelock.transferOwnership(config.ownerAddress); @@ -588,35 +460,80 @@ contract DeployL1Script is Script { bridgehub.transferOwnership(addresses.governance); bridgehub.setPendingAdmin(addresses.chainAdmin); - L1SharedBridge sharedBridge = L1SharedBridge(addresses.bridges.sharedBridgeProxy); + L1AssetRouter sharedBridge = L1AssetRouter(addresses.bridges.sharedBridgeProxy); sharedBridge.transferOwnership(addresses.governance); - sharedBridge.setPendingAdmin(addresses.chainAdmin); - StateTransitionManager stm = StateTransitionManager(addresses.stateTransition.stateTransitionProxy); - stm.transferOwnership(addresses.governance); - stm.setPendingAdmin(addresses.chainAdmin); + ChainTypeManager ctm = ChainTypeManager(addresses.stateTransition.chainTypeManagerProxy); + ctm.transferOwnership(addresses.governance); + ctm.setPendingAdmin(addresses.chainAdmin); + + CTMDeploymentTracker ctmDeploymentTracker = CTMDeploymentTracker(addresses.bridgehub.ctmDeploymentTrackerProxy); + ctmDeploymentTracker.transferOwnership(addresses.governance); vm.stopBroadcast(); console.log("Owners updated"); } - function saveOutput() internal { + function 
saveDiamondSelectors() public { + AdminFacet adminFacet = new AdminFacet(1); + GettersFacet gettersFacet = new GettersFacet(); + MailboxFacet mailboxFacet = new MailboxFacet(1, 1); + ExecutorFacet executorFacet = new ExecutorFacet(); + bytes4[] memory adminFacetSelectors = Utils.getAllSelectors(address(adminFacet).code); + bytes4[] memory gettersFacetSelectors = Utils.getAllSelectors(address(gettersFacet).code); + bytes4[] memory mailboxFacetSelectors = Utils.getAllSelectors(address(mailboxFacet).code); + bytes4[] memory executorFacetSelectors = Utils.getAllSelectors(address(executorFacet).code); + + string memory root = vm.projectRoot(); + string memory outputPath = string.concat(root, "/script-out/diamond-selectors.toml"); + + bytes memory adminFacetSelectorsBytes = abi.encode(adminFacetSelectors); + bytes memory gettersFacetSelectorsBytes = abi.encode(gettersFacetSelectors); + bytes memory mailboxFacetSelectorsBytes = abi.encode(mailboxFacetSelectors); + bytes memory executorFacetSelectorsBytes = abi.encode(executorFacetSelectors); + + vm.serializeBytes("diamond_selectors", "admin_facet_selectors", adminFacetSelectorsBytes); + vm.serializeBytes("diamond_selectors", "getters_facet_selectors", gettersFacetSelectorsBytes); + vm.serializeBytes("diamond_selectors", "mailbox_facet_selectors", mailboxFacetSelectorsBytes); + string memory toml = vm.serializeBytes( + "diamond_selectors", + "executor_facet_selectors", + executorFacetSelectorsBytes + ); + + vm.writeToml(toml, outputPath); + } + + function saveOutput(string memory outputPath) internal { vm.serializeAddress("bridgehub", "bridgehub_proxy_addr", addresses.bridgehub.bridgehubProxy); + vm.serializeAddress("bridgehub", "bridgehub_implementation_addr", addresses.bridgehub.bridgehubImplementation); + vm.serializeAddress( + "bridgehub", + "ctm_deployment_tracker_proxy_addr", + addresses.bridgehub.ctmDeploymentTrackerProxy + ); + vm.serializeAddress( + "bridgehub", + "ctm_deployment_tracker_implementation_addr", + 
addresses.bridgehub.ctmDeploymentTrackerImplementation + ); + vm.serializeAddress("bridgehub", "message_root_proxy_addr", addresses.bridgehub.messageRootProxy); string memory bridgehub = vm.serializeAddress( "bridgehub", - "bridgehub_implementation_addr", - addresses.bridgehub.bridgehubImplementation + "message_root_implementation_addr", + addresses.bridgehub.messageRootImplementation ); + // TODO(EVM-744): this has to be renamed to chain type manager vm.serializeAddress( "state_transition", "state_transition_proxy_addr", - addresses.stateTransition.stateTransitionProxy + addresses.stateTransition.chainTypeManagerProxy ); vm.serializeAddress( "state_transition", "state_transition_implementation_addr", - addresses.stateTransition.stateTransitionImplementation + addresses.stateTransition.chainTypeManagerImplementation ); vm.serializeAddress("state_transition", "verifier_addr", addresses.stateTransition.verifier); vm.serializeAddress("state_transition", "admin_facet_addr", addresses.stateTransition.adminFacet); @@ -634,6 +551,8 @@ contract DeployL1Script is Script { vm.serializeAddress("bridges", "erc20_bridge_implementation_addr", addresses.bridges.erc20BridgeImplementation); vm.serializeAddress("bridges", "erc20_bridge_proxy_addr", addresses.bridges.erc20BridgeProxy); + vm.serializeAddress("bridges", "l1_nullifier_implementation_addr", addresses.bridges.l1NullifierImplementation); + vm.serializeAddress("bridges", "l1_nullifier_proxy_addr", addresses.bridges.l1NullifierProxy); vm.serializeAddress( "bridges", "shared_bridge_implementation_addr", @@ -647,8 +566,8 @@ contract DeployL1Script is Script { vm.serializeUint( "contracts_config", - "diamond_init_pubdata_pricing_mode", - uint256(config.contracts.diamondInitPubdataPricingMode) + "diamond_init_max_l2_gas_per_batch", + config.contracts.diamondInitMaxL2GasPerBatch ); vm.serializeUint( "contracts_config", @@ -662,8 +581,8 @@ contract DeployL1Script is Script { ); vm.serializeUint( "contracts_config", - 
"diamond_init_max_l2_gas_per_batch", - config.contracts.diamondInitMaxL2GasPerBatch + "diamond_init_minimal_l2_gas_price", + config.contracts.diamondInitMinimalL2GasPrice ); vm.serializeUint( "contracts_config", @@ -672,13 +591,14 @@ contract DeployL1Script is Script { ); vm.serializeUint( "contracts_config", - "diamond_init_minimal_l2_gas_price", - config.contracts.diamondInitMinimalL2GasPrice + "diamond_init_pubdata_pricing_mode", + uint256(config.contracts.diamondInitPubdataPricingMode) ); + vm.serializeUint("contracts_config", "priority_tx_max_gas_limit", config.contracts.priorityTxMaxGasLimit); vm.serializeBytes32( "contracts_config", - "recursion_node_level_vk_hash", - config.contracts.recursionNodeLevelVkHash + "recursion_circuits_set_vks_hash", + config.contracts.recursionCircuitsSetVksHash ); vm.serializeBytes32( "contracts_config", @@ -687,28 +607,52 @@ contract DeployL1Script is Script { ); vm.serializeBytes32( "contracts_config", - "recursion_circuits_set_vks_hash", - config.contracts.recursionCircuitsSetVksHash + "recursion_node_level_vk_hash", + config.contracts.recursionNodeLevelVkHash ); - vm.serializeUint("contracts_config", "priority_tx_max_gas_limit", config.contracts.priorityTxMaxGasLimit); + vm.serializeBytes("contracts_config", "diamond_cut_data", config.contracts.diamondCutData); + string memory contractsConfig = vm.serializeBytes( "contracts_config", - "diamond_cut_data", - config.contracts.diamondCutData + "force_deployments_data", + generatedData.forceDeploymentsData ); - vm.serializeAddress("deployed_addresses", "transparent_proxy_admin_addr", addresses.transparentProxyAdmin); - vm.serializeAddress("deployed_addresses", "governance_addr", addresses.governance); vm.serializeAddress( "deployed_addresses", "blob_versioned_hash_retriever_addr", addresses.blobVersionedHashRetriever ); + vm.serializeAddress("deployed_addresses", "governance_addr", addresses.governance); + vm.serializeAddress("deployed_addresses", "transparent_proxy_admin_addr", 
addresses.transparentProxyAdmin); + vm.serializeAddress("deployed_addresses", "validator_timelock_addr", addresses.validatorTimelock); vm.serializeAddress("deployed_addresses", "chain_admin", addresses.chainAdmin); + vm.serializeAddress( + "deployed_addresses", + "access_control_restriction_addr", + addresses.accessControlRestrictionAddress + ); vm.serializeString("deployed_addresses", "bridgehub", bridgehub); + vm.serializeString("deployed_addresses", "bridges", bridges); vm.serializeString("deployed_addresses", "state_transition", stateTransition); - string memory deployedAddresses = vm.serializeString("deployed_addresses", "bridges", bridges); + + vm.serializeAddress( + "deployed_addresses", + "rollup_l1_da_validator_addr", + addresses.daAddresses.l1RollupDAValidator + ); + vm.serializeAddress( + "deployed_addresses", + "validium_l1_da_validator_addr", + addresses.daAddresses.l1ValidiumDAValidator + ); + + string memory deployedAddresses = vm.serializeAddress( + "deployed_addresses", + "native_token_vault_addr", + addresses.vaults.l1NativeTokenVaultProxy + ); vm.serializeAddress("root", "create2_factory_addr", addresses.create2Factory); vm.serializeBytes32("root", "create2_factory_salt", config.contracts.create2FactorySalt); @@ -718,13 +662,41 @@ contract DeployL1Script is Script { vm.serializeAddress("root", "deployer_addr", config.deployerAddress); vm.serializeString("root", "deployed_addresses", deployedAddresses); vm.serializeString("root", "contracts_config", contractsConfig); - string memory toml = vm.serializeAddress("root", "owner_addr", config.ownerAddress); + string memory toml = vm.serializeAddress("root", "owner_address", config.ownerAddress); + + vm.writeToml(toml, outputPath); + } + + function prepareForceDeploymentsData() internal view returns (bytes memory) { + require(addresses.governance != address(0), "Governance address is not set"); + + FixedForceDeploymentsData memory data = FixedForceDeploymentsData({ + l1ChainId: config.l1ChainId, + 
eraChainId: config.eraChainId, + l1AssetRouter: addresses.bridges.sharedBridgeProxy, + l2TokenProxyBytecodeHash: L2ContractHelper.hashL2Bytecode( + L2ContractsBytecodesLib.readBeaconProxyBytecode() + ), + aliasedL1Governance: AddressAliasHelper.applyL1ToL2Alias(addresses.governance), + maxNumberOfZKChains: config.contracts.maxNumberOfChains, + bridgehubBytecodeHash: L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readBridgehubBytecode()), + l2AssetRouterBytecodeHash: L2ContractHelper.hashL2Bytecode( + L2ContractsBytecodesLib.readL2AssetRouterBytecode() + ), + l2NtvBytecodeHash: L2ContractHelper.hashL2Bytecode( + L2ContractsBytecodesLib.readL2NativeTokenVaultBytecode() + ), + messageRootBytecodeHash: L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readMessageRootBytecode()), + // For newly created chains it it is expected that the following bridges are not present + l2SharedBridgeLegacyImpl: address(0), + l2BridgedStandardERC20Impl: address(0), + l2BridgeProxyOwnerAddress: address(0), + l2BridgedStandardERC20ProxyOwnerAddress: address(0) + }); - string memory path = string.concat(vm.projectRoot(), "/script-out/output-deploy-l1.toml"); - vm.writeToml(toml, path); + return abi.encode(data); } - function deployViaCreate2(bytes memory _bytecode) internal returns (address) { - return Utils.deployViaCreate2(_bytecode, config.contracts.create2FactorySalt, addresses.create2Factory); - } + // add this to be excluded from coverage report + function test() internal virtual override {} } diff --git a/l1-contracts/deploy-scripts/DeployL2Contracts.sol b/l1-contracts/deploy-scripts/DeployL2Contracts.sol index 3525b66b4..31415e186 100644 --- a/l1-contracts/deploy-scripts/DeployL2Contracts.sol +++ b/l1-contracts/deploy-scripts/DeployL2Contracts.sol @@ -1,4 +1,6 @@ -pragma solidity ^0.8.24; +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.21; import {Script} from "forge-std/Script.sol"; import {stdToml} from "forge-std/StdToml.sol"; @@ -6,42 +8,33 @@ import 
{stdToml} from "forge-std/StdToml.sol"; import {Utils} from "./Utils.sol"; import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; -import {L1SharedBridge} from "contracts/bridge/L1SharedBridge.sol"; +import {L2ContractsBytecodesLib} from "./L2ContractsBytecodesLib.sol"; +// import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; contract DeployL2Script is Script { using stdToml for string; - Config config; - ContractsBytecodes contracts; + Config internal config; + DeployedContrats internal deployed; + // solhint-disable-next-line gas-struct-packing struct Config { - address bridgehubAddress; + uint256 eraChainId; + uint256 chainId; address l1SharedBridgeProxy; + address bridgehubAddress; address governance; address erc20BridgeProxy; - // The owner of the contract sets the validator/attester weights. - // Can be the developer multisig wallet on mainnet. + bool validiumMode; address consensusRegistryOwner; - uint256 chainId; - uint256 eraChainId; - address l2SharedBridgeImplementation; - address l2SharedBridgeProxy; + } + + struct DeployedContrats { + address l2DaValidatorAddress; + address forceDeployUpgraderAddress; address consensusRegistryImplementation; address consensusRegistryProxy; address multicall3; - address forceDeployUpgraderAddress; - } - - struct ContractsBytecodes { - bytes l2StandardErc20FactoryBytecode; - bytes beaconProxy; - bytes l2StandardErc20Bytecode; - bytes l2SharedBridgeBytecode; - bytes l2SharedBridgeProxyBytecode; - bytes consensusRegistryBytecode; - bytes consensusRegistryProxyBytecode; - bytes multicall3Bytecode; - bytes forceDeployUpgrader; } function run() public { @@ -54,12 +47,10 @@ contract DeployL2Script is Script { function deploy(bool legacyBridge) public { initializeConfig(); - loadContracts(legacyBridge); - deployFactoryDeps(); - deploySharedBridge(); - deploySharedBridgeProxy(legacyBridge); - 
initializeChain(); + // Note, that it is important that the first transaction is for setting the L2 DA validator + deployL2DaValidator(); + deployForceDeployer(); deployConsensusRegistry(); deployConsensusRegistryProxy(); @@ -76,21 +67,15 @@ contract DeployL2Script is Script { deploySharedBridge(false); } + // TODO(EVM-745): port legacy contract tests to new contracts function deploySharedBridge(bool legacyBridge) internal { initializeConfig(); - loadContracts(legacyBridge); - - deployFactoryDeps(); - deploySharedBridge(); - deploySharedBridgeProxy(legacyBridge); - initializeChain(); saveOutput(); } function runDefaultUpgrader() public { initializeConfig(); - loadContracts(false); deployForceDeployer(); @@ -99,7 +84,6 @@ contract DeployL2Script is Script { function runDeployConsensusRegistry() public { initializeConfig(); - loadContracts(false); deployConsensusRegistry(); deployConsensusRegistryProxy(); @@ -109,59 +93,17 @@ contract DeployL2Script is Script { function runDeployMulticall3() public { initializeConfig(); - loadContracts(false); deployMulticall3(); saveOutput(); } - function loadContracts(bool legacyBridge) internal { - //HACK: Meanwhile we are not integrated foundry zksync we use contracts that has been built using hardhat - contracts.l2StandardErc20FactoryBytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/@openzeppelin/contracts/proxy/beacon/UpgradeableBeacon.sol/UpgradeableBeacon.json" - ); - contracts.beaconProxy = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/@openzeppelin/contracts/proxy/beacon/BeaconProxy.sol/BeaconProxy.json" - ); - contracts.l2StandardErc20Bytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/contracts/bridge/L2StandardERC20.sol/L2StandardERC20.json" - ); - - if (legacyBridge) { - contracts.l2SharedBridgeBytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/contracts/dev-contracts/DevL2SharedBridge.sol/DevL2SharedBridge.json" - ); - } else { - 
contracts.l2SharedBridgeBytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/contracts/bridge/L2SharedBridge.sol/L2SharedBridge.json" - ); - } - - contracts.l2SharedBridgeProxyBytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol/TransparentUpgradeableProxy.json" - ); - - contracts.consensusRegistryBytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/contracts/ConsensusRegistry.sol/ConsensusRegistry.json" - ); - contracts.consensusRegistryProxyBytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol/TransparentUpgradeableProxy.json" - ); - - contracts.multicall3Bytecode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/contracts/dev-contracts/Multicall3.sol/Multicall3.json" - ); - - contracts.forceDeployUpgrader = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/contracts/ForceDeployUpgrader.sol/ForceDeployUpgrader.json" - ); - } - function initializeConfig() internal { string memory root = vm.projectRoot(); string memory path = string.concat(root, "/script-config/config-deploy-l2-contracts.toml"); string memory toml = vm.readFile(path); + config.validiumMode = toml.readBool("$.validium_mode"); config.bridgehubAddress = toml.readAddress("$.bridgehub"); config.governance = toml.readAddress("$.governance"); config.l1SharedBridgeProxy = toml.readAddress("$.l1_shared_bridge"); @@ -172,37 +114,31 @@ contract DeployL2Script is Script { } function saveOutput() internal { - vm.serializeAddress("root", "l2_shared_bridge_implementation", config.l2SharedBridgeImplementation); - vm.serializeAddress("root", "l2_shared_bridge_proxy", config.l2SharedBridgeProxy); - vm.serializeAddress("root", "consensus_registry_implementation", config.consensusRegistryImplementation); - vm.serializeAddress("root", "consensus_registry_proxy", 
config.consensusRegistryProxy); - vm.serializeAddress("root", "multicall3", config.multicall3); - string memory toml = vm.serializeAddress("root", "l2_default_upgrader", config.forceDeployUpgraderAddress); + vm.serializeAddress("root", "l2_da_validator_address", deployed.l2DaValidatorAddress); + vm.serializeAddress("root", "multicall3", deployed.multicall3); + vm.serializeAddress("root", "consensus_registry_implementation", deployed.consensusRegistryImplementation); + vm.serializeAddress("root", "consensus_registry_proxy", deployed.consensusRegistryProxy); + string memory toml = vm.serializeAddress("root", "l2_default_upgrader", deployed.forceDeployUpgraderAddress); + string memory root = vm.projectRoot(); string memory path = string.concat(root, "/script-out/output-deploy-l2-contracts.toml"); vm.writeToml(toml, path); } - function deployFactoryDeps() internal { - bytes[] memory factoryDeps = new bytes[](3); - factoryDeps[0] = contracts.l2StandardErc20FactoryBytecode; - factoryDeps[1] = contracts.l2StandardErc20Bytecode; - factoryDeps[2] = contracts.beaconProxy; - Utils.publishBytecodes(factoryDeps, config.chainId, config.bridgehubAddress, config.l1SharedBridgeProxy); - } - - function deploySharedBridge() internal { - bytes[] memory factoryDeps = new bytes[](1); - factoryDeps[0] = contracts.beaconProxy; - - bytes memory constructorData = abi.encode(config.eraChainId); + function deployL2DaValidator() internal { + bytes memory bytecode; + if (config.validiumMode) { + bytecode = L2ContractsBytecodesLib.readValidiumL2DAValidatorBytecode(); + } else { + bytecode = L2ContractsBytecodesLib.readRollupL2DAValidatorBytecode(); + } - config.l2SharedBridgeImplementation = Utils.deployThroughL1({ - bytecode: contracts.l2SharedBridgeBytecode, - constructorargs: constructorData, + deployed.l2DaValidatorAddress = Utils.deployThroughL1({ + bytecode: bytecode, + constructorargs: bytes(""), create2salt: "", l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, - factoryDeps: factoryDeps, + 
factoryDeps: new bytes[](0), chainId: config.chainId, bridgehubAddress: config.bridgehubAddress, l1SharedBridgeProxy: config.l1SharedBridgeProxy @@ -211,8 +147,8 @@ contract DeployL2Script is Script { function deployForceDeployer() internal { bytes[] memory factoryDeps = new bytes[](0); - config.forceDeployUpgraderAddress = Utils.deployThroughL1({ - bytecode: contracts.forceDeployUpgrader, + deployed.forceDeployUpgraderAddress = Utils.deployThroughL1({ + bytecode: L2ContractsBytecodesLib.readForceDeployUpgraderBytecode(), constructorargs: "", create2salt: "", l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, @@ -223,51 +159,13 @@ contract DeployL2Script is Script { }); } - function deploySharedBridgeProxy(bool legacyBridge) internal { - address l2GovernorAddress = AddressAliasHelper.applyL1ToL2Alias(config.governance); - bytes32 l2StandardErc20BytecodeHash = L2ContractHelper.hashL2Bytecode(contracts.beaconProxy); - - string memory functionSignature; - - if (legacyBridge) { - functionSignature = "initializeDevBridge(address,address,bytes32,address)"; - } else { - functionSignature = "initialize(address,address,bytes32,address)"; - } - // solhint-disable-next-line func-named-parameters - bytes memory proxyInitializationParams = abi.encodeWithSignature( - functionSignature, - config.l1SharedBridgeProxy, - config.erc20BridgeProxy, - l2StandardErc20BytecodeHash, - l2GovernorAddress - ); - - bytes memory l2SharedBridgeProxyConstructorData = abi.encode( - config.l2SharedBridgeImplementation, - l2GovernorAddress, - proxyInitializationParams - ); - - config.l2SharedBridgeProxy = Utils.deployThroughL1({ - bytecode: contracts.l2SharedBridgeProxyBytecode, - constructorargs: l2SharedBridgeProxyConstructorData, - create2salt: "", - l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, - factoryDeps: new bytes[](0), - chainId: config.chainId, - bridgehubAddress: config.bridgehubAddress, - l1SharedBridgeProxy: config.l1SharedBridgeProxy - }); - } - // Deploy the ConsensusRegistry implementation and save 
its address into the config. function deployConsensusRegistry() internal { // ConsensusRegistry.sol doesn't have a constructor, just an initializer. bytes memory constructorData = ""; - config.consensusRegistryImplementation = Utils.deployThroughL1({ - bytecode: contracts.consensusRegistryBytecode, + deployed.consensusRegistryImplementation = Utils.deployThroughL1({ + bytecode: L2ContractsBytecodesLib.readConsensusRegistryBytecode(), constructorargs: constructorData, create2salt: "", l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, @@ -282,8 +180,8 @@ contract DeployL2Script is Script { // Multicall3 doesn't have a constructor. bytes memory constructorData = ""; - config.multicall3 = Utils.deployThroughL1({ - bytecode: contracts.multicall3Bytecode, + deployed.multicall3 = Utils.deployThroughL1({ + bytecode: L2ContractsBytecodesLib.readMulticall3Bytecode(), constructorargs: constructorData, create2salt: "", l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, @@ -308,13 +206,13 @@ contract DeployL2Script is Script { ); bytes memory consensusRegistryProxyConstructorData = abi.encode( - config.consensusRegistryImplementation, // _logic + deployed.consensusRegistryImplementation, // _logic l2GovernorAddress, // admin_ proxyInitializationParams // _data ); - config.consensusRegistryProxy = Utils.deployThroughL1({ - bytecode: contracts.consensusRegistryProxyBytecode, + deployed.consensusRegistryProxy = Utils.deployThroughL1({ + bytecode: L2ContractsBytecodesLib.readTransparentUpgradeableProxyBytecode(), constructorargs: consensusRegistryProxyConstructorData, create2salt: "", l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, @@ -324,15 +222,4 @@ contract DeployL2Script is Script { l1SharedBridgeProxy: config.l1SharedBridgeProxy }); } - - function initializeChain() internal { - L1SharedBridge bridge = L1SharedBridge(config.l1SharedBridgeProxy); - - Utils.chainAdminMulticall({ - _chainAdmin: bridge.admin(), - _target: config.l1SharedBridgeProxy, - _data: abi.encodeCall(bridge.initializeChainGovernance, 
(config.chainId, config.l2SharedBridgeProxy)), - _value: 0 - }); - } } diff --git a/l1-contracts/deploy-scripts/DeployPaymaster.s.sol b/l1-contracts/deploy-scripts/DeployPaymaster.s.sol index 52b664bc2..e2b12b319 100644 --- a/l1-contracts/deploy-scripts/DeployPaymaster.s.sol +++ b/l1-contracts/deploy-scripts/DeployPaymaster.s.sol @@ -1,5 +1,6 @@ -// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.24; +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.21; import {Script} from "forge-std/Script.sol"; import {stdToml} from "forge-std/StdToml.sol"; @@ -8,8 +9,9 @@ import {Utils} from "./Utils.sol"; contract DeployPaymaster is Script { using stdToml for string; - Config config; + Config internal config; + // solhint-disable-next-line gas-struct-packing struct Config { address bridgehubAddress; address l1SharedBridgeProxy; diff --git a/l1-contracts/deploy-scripts/DeployUtils.s.sol b/l1-contracts/deploy-scripts/DeployUtils.s.sol new file mode 100644 index 000000000..011e10fdc --- /dev/null +++ b/l1-contracts/deploy-scripts/DeployUtils.s.sol @@ -0,0 +1,487 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-custom-errors + +import {Script, console2 as console} from "forge-std/Script.sol"; +import {stdToml} from "forge-std/StdToml.sol"; +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {StateTransitionDeployedAddresses, Utils, L2_BRIDGEHUB_ADDRESS, L2_ASSET_ROUTER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS, L2_MESSAGE_ROOT_ADDRESS} from "./Utils.sol"; +import {Multicall3} from "contracts/dev-contracts/Multicall3.sol"; +import {Verifier} from "contracts/state-transition/Verifier.sol"; +import {TestnetVerifier} from 
"contracts/state-transition/TestnetVerifier.sol"; +import {VerifierParams, IVerifier} from "contracts/state-transition/chain-interfaces/IVerifier.sol"; +import {DefaultUpgrade} from "contracts/upgrades/DefaultUpgrade.sol"; +import {Governance} from "contracts/governance/Governance.sol"; +import {L1GenesisUpgrade} from "contracts/upgrades/L1GenesisUpgrade.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {ValidatorTimelock} from "contracts/state-transition/ValidatorTimelock.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {CTMDeploymentTracker} from "contracts/bridgehub/CTMDeploymentTracker.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; +import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; +import {ChainTypeManager} from "contracts/state-transition/ChainTypeManager.sol"; +import {ChainTypeManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IChainTypeManager.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {InitializeDataNewChain as DiamondInitializeDataNewChain} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {L1ERC20Bridge} from "contracts/bridge/L1ERC20Bridge.sol"; +import 
{L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {AddressHasNoCode} from "./ZkSyncScriptErrors.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {IL2ContractDeployer} from "contracts/common/interfaces/IL2ContractDeployer.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {IL1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {L1NullifierDev} from "contracts/dev-contracts/L1NullifierDev.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L2ContractsBytecodesLib} from "./L2ContractsBytecodesLib.sol"; + +struct FixedForceDeploymentsData { + uint256 l1ChainId; + uint256 eraChainId; + address l1AssetRouter; + bytes32 l2TokenProxyBytecodeHash; + address aliasedL1Governance; + uint256 maxNumberOfZKChains; + bytes32 bridgehubBytecodeHash; + bytes32 l2AssetRouterBytecodeHash; + bytes32 l2NtvBytecodeHash; + bytes32 messageRootBytecodeHash; + address l2SharedBridgeLegacyImpl; + address l2BridgedStandardERC20Impl; + address l2BridgeProxyOwnerAddress; + address l2BridgedStandardERC20ProxyOwnerAddress; +} + +// 
solhint-disable-next-line gas-struct-packing +struct DeployedAddresses { + BridgehubDeployedAddresses bridgehub; + StateTransitionDeployedAddresses stateTransition; + BridgesDeployedAddresses bridges; + L1NativeTokenVaultAddresses vaults; + DataAvailabilityDeployedAddresses daAddresses; + address transparentProxyAdmin; + address governance; + address chainAdmin; + address accessControlRestrictionAddress; + address blobVersionedHashRetriever; + address validatorTimelock; + address create2Factory; +} + +// solhint-disable-next-line gas-struct-packing +struct L1NativeTokenVaultAddresses { + address l1NativeTokenVaultImplementation; + address l1NativeTokenVaultProxy; +} + +struct DataAvailabilityDeployedAddresses { + address l1RollupDAValidator; + address l1ValidiumDAValidator; +} + +// solhint-disable-next-line gas-struct-packing +struct BridgehubDeployedAddresses { + address bridgehubImplementation; + address bridgehubProxy; + address ctmDeploymentTrackerImplementation; + address ctmDeploymentTrackerProxy; + address messageRootImplementation; + address messageRootProxy; +} + +// solhint-disable-next-line gas-struct-packing +struct BridgesDeployedAddresses { + address erc20BridgeImplementation; + address erc20BridgeProxy; + address sharedBridgeImplementation; + address sharedBridgeProxy; + address l1NullifierImplementation; + address l1NullifierProxy; + address bridgedStandardERC20Implementation; + address bridgedTokenBeacon; +} + +// solhint-disable-next-line gas-struct-packing +struct Config { + uint256 l1ChainId; + address deployerAddress; + uint256 eraChainId; + address ownerAddress; + bool testnetVerifier; + ContractsConfig contracts; + TokensConfig tokens; +} + +// solhint-disable-next-line gas-struct-packing +struct ContractsConfig { + bytes32 create2FactorySalt; + address create2FactoryAddr; + address multicall3Addr; + uint256 validatorTimelockExecutionDelay; + bytes32 genesisRoot; + uint256 genesisRollupLeafIndex; + bytes32 genesisBatchCommitment; + uint256 
latestProtocolVersion; + bytes32 recursionNodeLevelVkHash; + bytes32 recursionLeafLevelVkHash; + bytes32 recursionCircuitsSetVksHash; + uint256 priorityTxMaxGasLimit; + PubdataPricingMode diamondInitPubdataPricingMode; + uint256 diamondInitBatchOverheadL1Gas; + uint256 diamondInitMaxPubdataPerBatch; + uint256 diamondInitMaxL2GasPerBatch; + uint256 diamondInitPriorityTxMaxPubdata; + uint256 diamondInitMinimalL2GasPrice; + address governanceSecurityCouncilAddress; + uint256 governanceMinDelay; + uint256 maxNumberOfChains; + bytes diamondCutData; + bytes32 bootloaderHash; + bytes32 defaultAAHash; + uint256 fflonkProofLength; +} + +struct TokensConfig { + address tokenWethAddress; +} + +// solhint-disable-next-line gas-struct-packing +struct GeneratedData { + bytes forceDeploymentsData; +} + +contract DeployUtils is Script { + using stdToml for string; + + address internal constant DETERMINISTIC_CREATE2_ADDRESS = 0x4e59b44847b379578588920cA78FbF26c0B4956C; + + Config public config; + GeneratedData internal generatedData; + DeployedAddresses internal addresses; + + function initializeConfig(string memory configPath) internal { + string memory toml = vm.readFile(configPath); + + config.l1ChainId = block.chainid; + config.deployerAddress = msg.sender; + + // Config file must be parsed key by key, otherwise values returned + // are parsed alfabetically and not by key. 
+ // https://book.getfoundry.sh/cheatcodes/parse-toml + config.eraChainId = toml.readUint("$.era_chain_id"); + config.ownerAddress = toml.readAddress("$.owner_address"); + config.testnetVerifier = toml.readBool("$.testnet_verifier"); + + config.contracts.governanceSecurityCouncilAddress = toml.readAddress( + "$.contracts.governance_security_council_address" + ); + config.contracts.governanceMinDelay = toml.readUint("$.contracts.governance_min_delay"); + config.contracts.maxNumberOfChains = toml.readUint("$.contracts.max_number_of_chains"); + config.contracts.create2FactorySalt = toml.readBytes32("$.contracts.create2_factory_salt"); + if (vm.keyExistsToml(toml, "$.contracts.create2_factory_addr")) { + config.contracts.create2FactoryAddr = toml.readAddress("$.contracts.create2_factory_addr"); + } + config.contracts.validatorTimelockExecutionDelay = toml.readUint( + "$.contracts.validator_timelock_execution_delay" + ); + config.contracts.genesisRoot = toml.readBytes32("$.contracts.genesis_root"); + config.contracts.genesisRollupLeafIndex = toml.readUint("$.contracts.genesis_rollup_leaf_index"); + config.contracts.genesisBatchCommitment = toml.readBytes32("$.contracts.genesis_batch_commitment"); + config.contracts.latestProtocolVersion = toml.readUint("$.contracts.latest_protocol_version"); + config.contracts.recursionNodeLevelVkHash = toml.readBytes32("$.contracts.recursion_node_level_vk_hash"); + config.contracts.recursionLeafLevelVkHash = toml.readBytes32("$.contracts.recursion_leaf_level_vk_hash"); + config.contracts.recursionCircuitsSetVksHash = toml.readBytes32("$.contracts.recursion_circuits_set_vks_hash"); + config.contracts.priorityTxMaxGasLimit = toml.readUint("$.contracts.priority_tx_max_gas_limit"); + config.contracts.diamondInitPubdataPricingMode = PubdataPricingMode( + toml.readUint("$.contracts.diamond_init_pubdata_pricing_mode") + ); + config.contracts.diamondInitBatchOverheadL1Gas = toml.readUint( + "$.contracts.diamond_init_batch_overhead_l1_gas" + ); 
+ config.contracts.diamondInitMaxPubdataPerBatch = toml.readUint( + "$.contracts.diamond_init_max_pubdata_per_batch" + ); + config.contracts.diamondInitMaxL2GasPerBatch = toml.readUint("$.contracts.diamond_init_max_l2_gas_per_batch"); + config.contracts.diamondInitPriorityTxMaxPubdata = toml.readUint( + "$.contracts.diamond_init_priority_tx_max_pubdata" + ); + config.contracts.diamondInitMinimalL2GasPrice = toml.readUint("$.contracts.diamond_init_minimal_l2_gas_price"); + config.contracts.defaultAAHash = toml.readBytes32("$.contracts.default_aa_hash"); + config.contracts.bootloaderHash = toml.readBytes32("$.contracts.bootloader_hash"); + + config.tokens.tokenWethAddress = toml.readAddress("$.tokens.token_weth_address"); + } + + function instantiateCreate2Factory() internal { + address contractAddress; + + bool isDeterministicDeployed = DETERMINISTIC_CREATE2_ADDRESS.code.length > 0; + bool isConfigured = config.contracts.create2FactoryAddr != address(0); + + if (isConfigured) { + if (config.contracts.create2FactoryAddr.code.length == 0) { + revert AddressHasNoCode(config.contracts.create2FactoryAddr); + } + contractAddress = config.contracts.create2FactoryAddr; + console.log("Using configured Create2Factory address:", contractAddress); + } else if (isDeterministicDeployed) { + contractAddress = DETERMINISTIC_CREATE2_ADDRESS; + console.log("Using deterministic Create2Factory address:", contractAddress); + } else { + contractAddress = Utils.deployCreate2Factory(); + console.log("Create2Factory deployed at:", contractAddress); + } + + addresses.create2Factory = contractAddress; + } + + function deployViaCreate2( + bytes memory creationCode, + bytes memory constructorArgs + ) internal virtual returns (address) { + return + Utils.deployViaCreate2( + abi.encodePacked(creationCode, constructorArgs), + config.contracts.create2FactorySalt, + addresses.create2Factory + ); + } + + function deployVerifier() internal { + bytes memory code; + if (config.testnetVerifier) { + code 
= type(TestnetVerifier).creationCode; + } else { + code = type(Verifier).creationCode; + } + address contractAddress = deployViaCreate2(code, ""); + console.log("Verifier deployed at:", contractAddress); + addresses.stateTransition.dualVerifier = contractAddress; + } + + function deployDefaultUpgrade() internal { + address contractAddress = deployViaCreate2(type(DefaultUpgrade).creationCode, ""); + console.log("DefaultUpgrade deployed at:", contractAddress); + addresses.stateTransition.defaultUpgrade = contractAddress; + } + + function deployGenesisUpgrade() internal { + address contractAddress = deployViaCreate2(type(L1GenesisUpgrade).creationCode, ""); + console.log("GenesisUpgrade deployed at:", contractAddress); + addresses.stateTransition.genesisUpgrade = contractAddress; + } + + function deployValidatorTimelock() internal { + uint32 executionDelay = uint32(config.contracts.validatorTimelockExecutionDelay); + address contractAddress = deployViaCreate2( + type(ValidatorTimelock).creationCode, + abi.encode(config.deployerAddress, executionDelay, config.eraChainId) + ); + console.log("ValidatorTimelock deployed at:", contractAddress); + addresses.validatorTimelock = contractAddress; + } + + function deployGovernance() internal { + address contractAddress = deployViaCreate2( + type(Governance).creationCode, + abi.encode( + config.ownerAddress, + config.contracts.governanceSecurityCouncilAddress, + config.contracts.governanceMinDelay + ) + ); + console.log("Governance deployed at:", contractAddress); + addresses.governance = contractAddress; + } + + function deployChainAdmin() internal { + address accessControlRestriction = deployViaCreate2( + type(AccessControlRestriction).creationCode, + abi.encode(uint256(0), config.ownerAddress) + ); + + console.log("Access control restriction deployed at:", accessControlRestriction); + address[] memory restrictions = new address[](1); + restrictions[0] = accessControlRestriction; + addresses.accessControlRestrictionAddress = 
accessControlRestriction; + + address contractAddress = deployViaCreate2(type(ChainAdmin).creationCode, abi.encode(restrictions)); + console.log("ChainAdmin deployed at:", contractAddress); + addresses.chainAdmin = contractAddress; + } + + function deployTransparentProxyAdmin() internal { + vm.startBroadcast(); + ProxyAdmin proxyAdmin = new ProxyAdmin(); + proxyAdmin.transferOwnership(addresses.governance); + vm.stopBroadcast(); + console.log("Transparent Proxy Admin deployed at:", address(proxyAdmin)); + addresses.transparentProxyAdmin = address(proxyAdmin); + } + + function deployChainTypeManagerContract() internal { + deployStateTransitionDiamondFacets(); + deployChainTypeManagerImplementation(); + deployChainTypeManagerProxy(); + } + + function deployStateTransitionDiamondFacets() internal { + address executorFacet = deployViaCreate2(type(ExecutorFacet).creationCode, abi.encode()); + console.log("ExecutorFacet deployed at:", executorFacet); + addresses.stateTransition.executorFacet = executorFacet; + + address adminFacet = deployViaCreate2(type(AdminFacet).creationCode, abi.encode(config.l1ChainId)); + console.log("AdminFacet deployed at:", adminFacet); + addresses.stateTransition.adminFacet = adminFacet; + + address mailboxFacet = deployViaCreate2( + type(MailboxFacet).creationCode, + abi.encode(config.eraChainId, config.l1ChainId) + ); + console.log("MailboxFacet deployed at:", mailboxFacet); + addresses.stateTransition.mailboxFacet = mailboxFacet; + + address gettersFacet = deployViaCreate2(type(GettersFacet).creationCode, ""); + console.log("GettersFacet deployed at:", gettersFacet); + addresses.stateTransition.gettersFacet = gettersFacet; + + address diamondInit = deployViaCreate2(type(DiamondInit).creationCode, ""); + console.log("DiamondInit deployed at:", diamondInit); + addresses.stateTransition.diamondInit = diamondInit; + } + + function deployChainTypeManagerImplementation() internal { + bytes memory bytecode = type(ChainTypeManager).creationCode; + 
bytes memory constructorArgs = abi.encode(addresses.bridgehub.bridgehubProxy); + address contractAddress = deployViaCreate2(bytecode, constructorArgs); + console.log("ChainTypeManagerImplementation deployed at:", contractAddress); + addresses.stateTransition.chainTypeManagerImplementation = contractAddress; + } + + function deployChainTypeManagerProxy() internal { + string memory root = vm.projectRoot(); + string memory inputPath = string.concat(root, "/script-out/diamond-selectors.toml"); + string memory toml = vm.readFile(inputPath); + + bytes memory adminFacetSelectors = toml.readBytes("$.admin_facet_selectors"); + bytes memory gettersFacetSelectors = toml.readBytes("$.getters_facet_selectors"); + bytes memory mailboxFacetSelectors = toml.readBytes("$.mailbox_facet_selectors"); + bytes memory executorFacetSelectors = toml.readBytes("$.executor_facet_selectors"); + + bytes4[] memory adminFacetSelectorsArray = abi.decode(adminFacetSelectors, (bytes4[])); + bytes4[] memory gettersFacetSelectorsArray = abi.decode(gettersFacetSelectors, (bytes4[])); + bytes4[] memory mailboxFacetSelectorsArray = abi.decode(mailboxFacetSelectors, (bytes4[])); + bytes4[] memory executorFacetSelectorsArray = abi.decode(executorFacetSelectors, (bytes4[])); + + Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](4); + facetCuts[0] = Diamond.FacetCut({ + facet: addresses.stateTransition.adminFacet, + action: Diamond.Action.Add, + isFreezable: false, + selectors: adminFacetSelectorsArray + }); + facetCuts[1] = Diamond.FacetCut({ + facet: addresses.stateTransition.gettersFacet, + action: Diamond.Action.Add, + isFreezable: false, + selectors: gettersFacetSelectorsArray + }); + facetCuts[2] = Diamond.FacetCut({ + facet: addresses.stateTransition.mailboxFacet, + action: Diamond.Action.Add, + isFreezable: true, + selectors: mailboxFacetSelectorsArray + }); + facetCuts[3] = Diamond.FacetCut({ + facet: addresses.stateTransition.executorFacet, + action: Diamond.Action.Add, + isFreezable: 
true, + selectors: executorFacetSelectorsArray + }); + + VerifierParams memory verifierParams = VerifierParams({ + recursionNodeLevelVkHash: config.contracts.recursionNodeLevelVkHash, + recursionLeafLevelVkHash: config.contracts.recursionLeafLevelVkHash, + recursionCircuitsSetVksHash: config.contracts.recursionCircuitsSetVksHash + }); + + FeeParams memory feeParams = FeeParams({ + pubdataPricingMode: config.contracts.diamondInitPubdataPricingMode, + batchOverheadL1Gas: uint32(config.contracts.diamondInitBatchOverheadL1Gas), + maxPubdataPerBatch: uint32(config.contracts.diamondInitMaxPubdataPerBatch), + maxL2GasPerBatch: uint32(config.contracts.diamondInitMaxL2GasPerBatch), + priorityTxMaxPubdata: uint32(config.contracts.diamondInitPriorityTxMaxPubdata), + minimalL2GasPrice: uint64(config.contracts.diamondInitMinimalL2GasPrice) + }); + + DiamondInitializeDataNewChain memory initializeData = DiamondInitializeDataNewChain({ + dualVerifier: IVerifier(addresses.stateTransition.dualVerifier), + plonkVerifier: addresses.stateTransition.plonkVerifier, + fflonkVerifier: addresses.stateTransition.fflonkVerifier, + fflonkProofLength: config.contracts.fflonkProofLength, + verifierParams: verifierParams, + l2BootloaderBytecodeHash: config.contracts.bootloaderHash, + l2DefaultAccountBytecodeHash: config.contracts.defaultAAHash, + priorityTxMaxGasLimit: config.contracts.priorityTxMaxGasLimit, + feeParams: feeParams, + blobVersionedHashRetriever: addresses.blobVersionedHashRetriever + }); + + Diamond.DiamondCutData memory diamondCut = Diamond.DiamondCutData({ + facetCuts: facetCuts, + initAddress: addresses.stateTransition.diamondInit, + initCalldata: abi.encode(initializeData) + }); + + config.contracts.diamondCutData = abi.encode(diamondCut); + + ChainCreationParams memory chainCreationParams = ChainCreationParams({ + genesisUpgrade: addresses.stateTransition.genesisUpgrade, + genesisBatchHash: config.contracts.genesisRoot, + genesisIndexRepeatedStorageChanges: 
uint64(config.contracts.genesisRollupLeafIndex), + genesisBatchCommitment: config.contracts.genesisBatchCommitment, + diamondCut: diamondCut, + forceDeploymentsData: generatedData.forceDeploymentsData + }); + + ChainTypeManagerInitializeData memory diamondInitData = ChainTypeManagerInitializeData({ + owner: msg.sender, + validatorTimelock: addresses.validatorTimelock, + chainCreationParams: chainCreationParams, + protocolVersion: config.contracts.latestProtocolVersion + }); + + address contractAddress = deployViaCreate2( + type(TransparentUpgradeableProxy).creationCode, + abi.encode( + addresses.stateTransition.chainTypeManagerImplementation, + addresses.transparentProxyAdmin, + abi.encodeCall(ChainTypeManager.initialize, (diamondInitData)) + ) + ); + console.log("ChainTypeManagerProxy deployed at:", contractAddress); + addresses.stateTransition.chainTypeManagerProxy = contractAddress; + } + + function test() internal virtual {} +} diff --git a/l1-contracts/deploy-scripts/GatewayCTMFromL1.s.sol b/l1-contracts/deploy-scripts/GatewayCTMFromL1.s.sol new file mode 100644 index 000000000..840b26316 --- /dev/null +++ b/l1-contracts/deploy-scripts/GatewayCTMFromL1.s.sol @@ -0,0 +1,433 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-custom-errors, reason-string + +import {Script, console2 as console} from "forge-std/Script.sol"; +// import {Vm} from "forge-std/Vm.sol"; +import {stdToml} from "forge-std/StdToml.sol"; + +import {Ownable} from "@openzeppelin/contracts-v4/access/Ownable.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "contracts/common/Config.sol"; +import {L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import 
{StateTransitionDeployedAddresses, Utils, L2_BRIDGEHUB_ADDRESS} from "./Utils.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {L2ContractsBytecodesLib} from "./L2ContractsBytecodesLib.sol"; + +import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; +import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; +import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; + +import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; +import {Verifier} from "contracts/state-transition/Verifier.sol"; +import {VerifierParams, IVerifier} from "contracts/state-transition/chain-interfaces/IVerifier.sol"; +import {ValidatorTimelock} from "contracts/state-transition/ValidatorTimelock.sol"; +import {L1GenesisUpgrade} from "contracts/upgrades/L1GenesisUpgrade.sol"; +import {DefaultUpgrade} from "contracts/upgrades/DefaultUpgrade.sol"; + +import {ChainTypeManager} from "contracts/state-transition/ChainTypeManager.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {InitializeDataNewChain as DiamondInitializeDataNewChain} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {ChainTypeManagerInitializeData, ChainCreationParams, IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; + +/// @notice Scripts that is responsible for preparing the chain to become a gateway +contract 
GatewayCTMFromL1 is Script { + using stdToml for string; + + address internal constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; + bytes32 internal constant STATE_TRANSITION_NEW_CHAIN_HASH = keccak256("NewHyperchain(uint256,address)"); + + address deployerAddress; + + // solhint-disable-next-line gas-struct-packing + struct Config { + address bridgehub; + address ctmDeploymentTracker; + address nativeTokenVault; + address chainTypeManagerProxy; + address sharedBridgeProxy; + address governance; + uint256 chainChainId; + uint256 eraChainId; + uint256 l1ChainId; + bool testnetVerifier; + bytes32 recursionNodeLevelVkHash; + bytes32 recursionLeafLevelVkHash; + bytes32 recursionCircuitsSetVksHash; + PubdataPricingMode diamondInitPubdataPricingMode; + uint256 diamondInitBatchOverheadL1Gas; + uint256 diamondInitMaxPubdataPerBatch; + uint256 diamondInitMaxL2GasPerBatch; + uint256 diamondInitPriorityTxMaxPubdata; + uint256 diamondInitMinimalL2GasPrice; + bytes32 bootloaderHash; + bytes32 defaultAAHash; + uint256 priorityTxMaxGasLimit; + bytes32 genesisRoot; + uint256 genesisRollupLeafIndex; + bytes32 genesisBatchCommitment; + uint256 latestProtocolVersion; + bytes forceDeploymentsData; + uint256 fflonkProofLength; + } + + struct Output { + StateTransitionDeployedAddresses gatewayStateTransition; + address multicall3; + bytes diamondCutData; + address relayedSLDAValidator; + address validiumDAValidator; + } + + Config internal config; + Output internal output; + + function run() public { + console.log("Setting up the Gateway script"); + + initializeConfig(); + deployGatewayContracts(); + + saveOutput(); + } + + function initializeConfig() internal { + deployerAddress = msg.sender; + string memory root = vm.projectRoot(); + string memory path = string.concat(root, "/script-config/config-deploy-gateway-ctm.toml"); + string memory toml = vm.readFile(path); + + // Config file must be parsed key by key, otherwise values returned + // are parsed alfabetically and 
not by key. +    // https://book.getfoundry.sh/cheatcodes/parse-toml + +    // Initializing all values at once is preferable to ensure type safety of +    // the fact that all values are initialized +    config = Config({ +      bridgehub: toml.readAddress("$.bridgehub_proxy_addr"), +      ctmDeploymentTracker: toml.readAddress("$.ctm_deployment_tracker_proxy_addr"), +      nativeTokenVault: toml.readAddress("$.native_token_vault_addr"), +      chainTypeManagerProxy: toml.readAddress("$.chain_type_manager_proxy_addr"), +      sharedBridgeProxy: toml.readAddress("$.shared_bridge_proxy_addr"), +      chainChainId: toml.readUint("$.chain_chain_id"), +      governance: toml.readAddress("$.governance"), +      l1ChainId: toml.readUint("$.l1_chain_id"), +      eraChainId: toml.readUint("$.era_chain_id"), +      testnetVerifier: toml.readBool("$.testnet_verifier"), +      recursionNodeLevelVkHash: toml.readBytes32("$.recursion_node_level_vk_hash"), +      recursionLeafLevelVkHash: toml.readBytes32("$.recursion_leaf_level_vk_hash"), +      recursionCircuitsSetVksHash: toml.readBytes32("$.recursion_circuits_set_vks_hash"), +      diamondInitPubdataPricingMode: PubdataPricingMode(toml.readUint("$.diamond_init_pubdata_pricing_mode")), +      diamondInitBatchOverheadL1Gas: toml.readUint("$.diamond_init_batch_overhead_l1_gas"), +      diamondInitMaxPubdataPerBatch: toml.readUint("$.diamond_init_max_pubdata_per_batch"), +      diamondInitMaxL2GasPerBatch: toml.readUint("$.diamond_init_max_l2_gas_per_batch"), +      diamondInitPriorityTxMaxPubdata: toml.readUint("$.diamond_init_priority_tx_max_pubdata"), +      diamondInitMinimalL2GasPrice: toml.readUint("$.diamond_init_minimal_l2_gas_price"), +      bootloaderHash: toml.readBytes32("$.bootloader_hash"), +      defaultAAHash: toml.readBytes32("$.default_aa_hash"), +      priorityTxMaxGasLimit: toml.readUint("$.priority_tx_max_gas_limit"), +      genesisRoot: toml.readBytes32("$.genesis_root"), +      genesisRollupLeafIndex: toml.readUint("$.genesis_rollup_leaf_index"), +      genesisBatchCommitment: toml.readBytes32("$.genesis_batch_commitment"), + 
latestProtocolVersion: toml.readUint("$.latest_protocol_version"), + forceDeploymentsData: toml.readBytes("$.force_deployments_data") + }); + } + + function saveOutput() internal { + vm.serializeAddress( + "gateway_state_transition", + "chain_type_manager_proxy_addr", + output.gatewayStateTransition.chainTypeManagerProxy + ); + vm.serializeAddress( + "gateway_state_transition", + "chain_type_manager_implementation_addr", + output.gatewayStateTransition.chainTypeManagerImplementation + ); + vm.serializeAddress("gateway_state_transition", "verifier_addr", output.gatewayStateTransition.verifier); + vm.serializeAddress("gateway_state_transition", "admin_facet_addr", output.gatewayStateTransition.adminFacet); + vm.serializeAddress( + "gateway_state_transition", + "mailbox_facet_addr", + output.gatewayStateTransition.mailboxFacet + ); + vm.serializeAddress( + "gateway_state_transition", + "executor_facet_addr", + output.gatewayStateTransition.executorFacet + ); + vm.serializeAddress( + "gateway_state_transition", + "getters_facet_addr", + output.gatewayStateTransition.gettersFacet + ); + vm.serializeAddress("gateway_state_transition", "diamond_init_addr", output.gatewayStateTransition.diamondInit); + vm.serializeAddress( + "gateway_state_transition", + "genesis_upgrade_addr", + output.gatewayStateTransition.genesisUpgrade + ); + vm.serializeAddress( + "gateway_state_transition", + "default_upgrade_addr", + output.gatewayStateTransition.defaultUpgrade + ); + vm.serializeAddress( + "gateway_state_transition", + "validator_timelock_addr", + output.gatewayStateTransition.validatorTimelock + ); + string memory gatewayStateTransition = vm.serializeAddress( + "gateway_state_transition", + "diamond_proxy_addr", + output.gatewayStateTransition.diamondProxy + ); + vm.serializeString("root", "gateway_state_transition", gatewayStateTransition); + vm.serializeAddress("root", "multicall3_addr", output.multicall3); + vm.serializeAddress("root", "relayed_sl_da_validator", 
output.relayedSLDAValidator); + vm.serializeAddress("root", "validium_da_validator", output.validiumDAValidator); + + string memory toml = vm.serializeBytes("root", "diamond_cut_data", output.diamondCutData); + string memory path = string.concat(vm.projectRoot(), "/script-out/output-deploy-gateway-ctm.toml"); + vm.writeToml(toml, path); + } + + /// @dev The sender may not have any privileges + function deployGatewayContracts() public { + output.multicall3 = _deployInternal(L2ContractsBytecodesLib.readMulticall3Bytecode(), hex""); + + deployGatewayFacets(); + + output.gatewayStateTransition.verifier = deployGatewayVerifier(); + output.gatewayStateTransition.validatorTimelock = deployValidatorTimelock(); + output.gatewayStateTransition.genesisUpgrade = address( + _deployInternal(L2ContractsBytecodesLib.readL1GenesisUpgradeBytecode(), hex"") + ); + console.log("Genesis upgrade deployed at", output.gatewayStateTransition.genesisUpgrade); + output.gatewayStateTransition.defaultUpgrade = address( + _deployInternal(L2ContractsBytecodesLib.readDefaultUpgradeBytecode(), hex"") + ); + console.log("Default upgrade deployed at", output.gatewayStateTransition.defaultUpgrade); + output.gatewayStateTransition.diamondInit = address( + _deployInternal(L2ContractsBytecodesLib.readDiamondInitBytecode(), hex"") + ); + console.log("Diamond init deployed at", output.gatewayStateTransition.diamondInit); + + deployGatewayChainTypeManager(); + setChainTypeManagerInValidatorTimelock(); + + output.relayedSLDAValidator = _deployInternal( + L2ContractsBytecodesLib.readRelayedSLDAValidatorBytecode(), + hex"" + ); + + output.validiumDAValidator = _deployInternal( + L2ContractsBytecodesLib.readValidiumL1DAValidatorBytecode(), + hex"" + ); + } + + function _deployInternal(bytes memory bytecode, bytes memory constructorargs) internal returns (address) { + return + Utils.deployThroughL1({ + bytecode: bytecode, + constructorargs: constructorargs, + create2salt: bytes32(0), + l2GasLimit: 
Utils.MAX_PRIORITY_TX_GAS, + factoryDeps: new bytes[](0), + chainId: config.chainChainId, + bridgehubAddress: config.bridgehub, + l1SharedBridgeProxy: config.sharedBridgeProxy + }); + } + + function deployGatewayFacets() internal { + address adminFacet = address( + _deployInternal(L2ContractsBytecodesLib.readAdminFacetBytecode(), abi.encode(config.l1ChainId)) + ); + console.log("Admin facet deployed at", adminFacet); + + address mailboxFacet = address( + _deployInternal( + L2ContractsBytecodesLib.readMailboxFacetBytecode(), + abi.encode(config.l1ChainId, config.eraChainId) + ) + ); + console.log("Mailbox facet deployed at", mailboxFacet); + + address executorFacet = address(_deployInternal(L2ContractsBytecodesLib.readExecutorFacetBytecode(), hex"")); + console.log("ExecutorFacet facet deployed at", executorFacet); + + address gettersFacet = address(_deployInternal(L2ContractsBytecodesLib.readGettersFacetBytecode(), hex"")); + console.log("Getters facet deployed at", gettersFacet); + + output.gatewayStateTransition.adminFacet = adminFacet; + output.gatewayStateTransition.mailboxFacet = mailboxFacet; + output.gatewayStateTransition.executorFacet = executorFacet; + output.gatewayStateTransition.gettersFacet = gettersFacet; + } + + function deployGatewayVerifier() internal returns (address verifier) { + if (config.testnetVerifier) { + verifier = address( + _deployInternal(L2ContractsBytecodesLib.readL2TestnetVerifierBytecode(), abi.encode(config.l1ChainId)) + ); + } else { + verifier = address(_deployInternal(L2ContractsBytecodesLib.readL2VerifierBytecode(), hex"")); + } + + console.log("Verifier deployed at", verifier); + } + + function deployValidatorTimelock() internal returns (address validatorTimelock) { + // address aliasedGovernor = AddressAliasHelper.applyL1ToL2Alias(config.governance); + // TODO(EVM-745): eventually the governance should be moved to the governance contract + // Note: we do not apply alias because the deployer is an EOA. 
+    validatorTimelock = address( +      _deployInternal( +        L2ContractsBytecodesLib.readValidatorTimelockBytecode(), +        abi.encode(deployerAddress, 0, config.eraChainId) +      ) +    ); +    console.log("Validator timelock deployed at", validatorTimelock); +  } + +  function deployGatewayChainTypeManager() internal { +    // We need to publish the bytecode of the diamond proxy contract, +    // we can only do it via deploying its dummy version. +    // We could've published the dependency separately, but we just repeated the code that would be +    // used for pure L2 execution. +    address dp = address(_deployInternal(L2ContractsBytecodesLib.readDiamondProxyBytecode(), hex"")); +    console.log("Dummy diamond proxy deployed at", dp); + +    output.gatewayStateTransition.chainTypeManagerImplementation = address( +      _deployInternal(L2ContractsBytecodesLib.readChainTypeManagerBytecode(), abi.encode(L2_BRIDGEHUB_ADDRESS)) +    ); +    console.log( +      "StateTransitionImplementation deployed at", +      output.gatewayStateTransition.chainTypeManagerImplementation +    ); + +    // TODO(EVM-745): eventually a proxy admin or something should be deployed here +    Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](4); +    facetCuts[0] = Diamond.FacetCut({ +      facet: output.gatewayStateTransition.adminFacet, +      action: Diamond.Action.Add, +      isFreezable: false, +      selectors: Utils.getAllSelectorsForFacet("Admin") +    }); +    facetCuts[1] = Diamond.FacetCut({ +      facet: output.gatewayStateTransition.gettersFacet, +      action: Diamond.Action.Add, +      isFreezable: false, +      selectors: Utils.getAllSelectorsForFacet("Getters") +    }); +    facetCuts[2] = Diamond.FacetCut({ +      facet: output.gatewayStateTransition.mailboxFacet, +      action: Diamond.Action.Add, +      isFreezable: true, +      selectors: Utils.getAllSelectorsForFacet("Mailbox") +    }); +    facetCuts[3] = Diamond.FacetCut({ +      facet: output.gatewayStateTransition.executorFacet, +      action: Diamond.Action.Add, +      isFreezable: true, +      selectors: Utils.getAllSelectorsForFacet("Executor") +    }); + +    VerifierParams 
memory verifierParams = VerifierParams({ + recursionNodeLevelVkHash: config.recursionNodeLevelVkHash, + recursionLeafLevelVkHash: config.recursionLeafLevelVkHash, + recursionCircuitsSetVksHash: config.recursionCircuitsSetVksHash + }); + + FeeParams memory feeParams = FeeParams({ + pubdataPricingMode: config.diamondInitPubdataPricingMode, + batchOverheadL1Gas: uint32(config.diamondInitBatchOverheadL1Gas), + maxPubdataPerBatch: uint32(config.diamondInitMaxPubdataPerBatch), + maxL2GasPerBatch: uint32(config.diamondInitMaxL2GasPerBatch), + priorityTxMaxPubdata: uint32(config.diamondInitPriorityTxMaxPubdata), + minimalL2GasPrice: uint64(config.diamondInitMinimalL2GasPrice) + }); + + DiamondInitializeDataNewChain memory initializeData = DiamondInitializeDataNewChain({ + dualVerifier: IVerifier(output.gatewayStateTransition.dualVerifier), + plonkVerifier: output.gatewayStateTransition.plonkVerifier, + fflonkVerifier: output.gatewayStateTransition.fflonkVerifier, + fflonkProofLength: config.fflonkProofLength, + verifierParams: verifierParams, + l2BootloaderBytecodeHash: config.bootloaderHash, + l2DefaultAccountBytecodeHash: config.defaultAAHash, + priorityTxMaxGasLimit: config.priorityTxMaxGasLimit, + feeParams: feeParams, + // We can not provide zero value there. 
At the same time, there is no such contract on gateway + blobVersionedHashRetriever: ADDRESS_ONE + }); + + Diamond.DiamondCutData memory diamondCut = Diamond.DiamondCutData({ + facetCuts: facetCuts, + initAddress: output.gatewayStateTransition.diamondInit, + initCalldata: abi.encode(initializeData) + }); + + output.diamondCutData = abi.encode(diamondCut); + + ChainCreationParams memory chainCreationParams = ChainCreationParams({ + genesisUpgrade: output.gatewayStateTransition.genesisUpgrade, + genesisBatchHash: config.genesisRoot, + genesisIndexRepeatedStorageChanges: uint64(config.genesisRollupLeafIndex), + genesisBatchCommitment: config.genesisBatchCommitment, + diamondCut: diamondCut, + // Note, it is the same as for contracts that are based on L2 + forceDeploymentsData: config.forceDeploymentsData + }); + + ChainTypeManagerInitializeData memory diamondInitData = ChainTypeManagerInitializeData({ + owner: msg.sender, + validatorTimelock: output.gatewayStateTransition.validatorTimelock, + chainCreationParams: chainCreationParams, + protocolVersion: config.latestProtocolVersion + }); + + output.gatewayStateTransition.chainTypeManagerProxy = _deployInternal( + L2ContractsBytecodesLib.readTransparentUpgradeableProxyBytecode(), + abi.encode( + output.gatewayStateTransition.chainTypeManagerImplementation, + deployerAddress, + abi.encodeCall(ChainTypeManager.initialize, (diamondInitData)) + ) + ); + + console.log("ChainTypeManagerProxy deployed at:", output.gatewayStateTransition.chainTypeManagerProxy); + output.gatewayStateTransition.chainTypeManagerProxy = output.gatewayStateTransition.chainTypeManagerProxy; + } + + function setChainTypeManagerInValidatorTimelock() internal { + bytes memory data = abi.encodeCall( + ValidatorTimelock.setChainTypeManager, + (IChainTypeManager(output.gatewayStateTransition.chainTypeManagerProxy)) + ); + + Utils.runL1L2Transaction({ + l2Calldata: data, + l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, + l2Value: 0, + factoryDeps: new bytes[](0), + 
dstAddress: output.gatewayStateTransition.validatorTimelock, + chainId: config.chainChainId, + bridgehubAddress: config.bridgehub, + l1SharedBridgeProxy: config.sharedBridgeProxy + }); + + console.log("ChainTypeManager set in ValidatorTimelock"); + } +} diff --git a/l1-contracts/deploy-scripts/GatewayPreparation.s.sol b/l1-contracts/deploy-scripts/GatewayPreparation.s.sol new file mode 100644 index 000000000..1c68dd82d --- /dev/null +++ b/l1-contracts/deploy-scripts/GatewayPreparation.s.sol @@ -0,0 +1,494 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-custom-errors, reason-string + +import {Script, console2 as console} from "forge-std/Script.sol"; +// import {Vm} from "forge-std/Vm.sol"; +import {stdToml} from "forge-std/StdToml.sol"; + +import {Ownable} from "@openzeppelin/contracts-v4/access/Ownable.sol"; +import {IBridgehub, BridgehubBurnCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "contracts/common/Config.sol"; +import {L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {StateTransitionDeployedAddresses, Utils, L2_BRIDGEHUB_ADDRESS} from "./Utils.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {ValidatorTimelock} from "contracts/state-transition/ValidatorTimelock.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {GatewayTransactionFilterer} from "contracts/transactionFilterer/GatewayTransactionFilterer.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {SET_ASSET_HANDLER_COUNTERPART_ENCODING_VERSION} 
from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {CTM_DEPLOYMENT_TRACKER_ENCODING_VERSION} from "contracts/bridgehub/CTMDeploymentTracker.sol"; +import {L2AssetRouter, IL2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {BridgehubMintCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L2_ASSET_ROUTER_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {FinalizeL1DepositParams} from "contracts/bridge/interfaces/IL1Nullifier.sol"; + +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; + +// solhint-disable-next-line gas-struct-packing +struct Config { + address bridgehub; + address ctmDeploymentTracker; + address chainTypeManagerProxy; + address sharedBridgeProxy; + address governance; + uint256 gatewayChainId; + address gatewayChainAdmin; + address gatewayAccessControlRestriction; + address gatewayChainProxyAdmin; + address l1NullifierProxy; + bytes gatewayDiamondCutData; + bytes l1DiamondCutData; +} + +/// @notice Scripts that is responsible for preparing the chain to become a gateway +contract GatewayPreparation is Script { + using stdToml for string; + + address internal constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; + bytes32 internal constant STATE_TRANSITION_NEW_CHAIN_HASH = keccak256("NewHyperchain(uint256,address)"); + + address deployerAddress; + uint256 l1ChainId; + + struct Output { + bytes32 governanceL2TxHash; + address gatewayTransactionFiltererImplementation; + address gatewayTransactionFiltererProxy; + } + + Config internal config; + + function run() public { + console.log("Setting up the Gateway script"); + + initializeConfig(); + } + + function _getL1GasPrice() internal virtual returns (uint256) { + 
return Utils.bytesToUint256(vm.rpc("eth_gasPrice", "[]")); + } + + function initializeConfig() internal virtual { + deployerAddress = msg.sender; + l1ChainId = block.chainid; + + string memory root = vm.projectRoot(); + string memory path = string.concat(root, vm.envString("GATEWAY_PREPARATION_L1_CONFIG")); + string memory toml = vm.readFile(path); + + // Config file must be parsed key by key, otherwise values returned + // are parsed alfabetically and not by key. + // https://book.getfoundry.sh/cheatcodes/parse-toml + + // Initializing all values at once is preferable to ensure type safety of + // the fact that all values are initialized + config = Config({ + bridgehub: toml.readAddress("$.bridgehub_proxy_addr"), + ctmDeploymentTracker: toml.readAddress("$.ctm_deployment_tracker_proxy_addr"), + chainTypeManagerProxy: toml.readAddress("$.chain_type_manager_proxy_addr"), + sharedBridgeProxy: toml.readAddress("$.shared_bridge_proxy_addr"), + gatewayChainId: toml.readUint("$.chain_chain_id"), + governance: toml.readAddress("$.governance"), + gatewayDiamondCutData: toml.readBytes("$.gateway_diamond_cut_data"), + l1DiamondCutData: toml.readBytes("$.l1_diamond_cut_data"), + gatewayChainAdmin: toml.readAddress("$.chain_admin"), + gatewayAccessControlRestriction: toml.readAddress("$.access_control_restriction"), + gatewayChainProxyAdmin: toml.readAddress("$.chain_proxy_admin"), + l1NullifierProxy: toml.readAddress("$.l1_nullifier_proxy_addr") + }); + } + + function saveOutput(Output memory output) internal { + vm.serializeAddress( + "root", + "gateway_transaction_filterer_implementation", + output.gatewayTransactionFiltererImplementation + ); + vm.serializeAddress("root", "gateway_transaction_filterer_proxy", output.gatewayTransactionFiltererProxy); + string memory toml = vm.serializeBytes32("root", "governance_l2_tx_hash", output.governanceL2TxHash); + string memory path = string.concat(vm.projectRoot(), "/script-out/output-gateway-preparation-l1.toml"); + 
vm.writeToml(toml, path); + } + + function saveOutput(bytes32 governanceL2TxHash) internal { + Output memory output = Output({ + governanceL2TxHash: governanceL2TxHash, + gatewayTransactionFiltererImplementation: address(0), + gatewayTransactionFiltererProxy: address(0) + }); + + saveOutput(output); + } + + function saveOutput( + address gatewayTransactionFiltererImplementation, + address gatewayTransactionFiltererProxy + ) internal { + Output memory output = Output({ + governanceL2TxHash: bytes32(0), + gatewayTransactionFiltererImplementation: gatewayTransactionFiltererImplementation, + gatewayTransactionFiltererProxy: gatewayTransactionFiltererProxy + }); + + saveOutput(output); + } + + /// @dev Requires the sender to be the owner of the contract + function governanceRegisterGateway() public { + initializeConfig(); + + IBridgehub bridgehub = IBridgehub(config.bridgehub); + + if (bridgehub.whitelistedSettlementLayers(config.gatewayChainId)) { + console.log("Chain already whitelisted as settlement layer"); + } else { + bytes memory data = abi.encodeCall(bridgehub.registerSettlementLayer, (config.gatewayChainId, true)); + Utils.executeUpgrade({ + _governor: config.governance, + _salt: bytes32(0), + _target: address(bridgehub), + _data: data, + _value: 0, + _delay: 0 + }); + console.log("Gateway whitelisted as settlement layer"); + } + // No tx has been executed, so we save an empty hash + saveOutput(bytes32(0)); + } + + /// @dev Requires the sender to be the owner of the contract + function governanceWhitelistGatewayCTM(address gatewayCTMAddress, bytes32 governanoceOperationSalt) public { + initializeConfig(); + + bytes memory data = abi.encodeCall(IBridgehub.addChainTypeManager, (gatewayCTMAddress)); + + bytes32 l2TxHash = Utils.runGovernanceL1L2DirectTransaction( + _getL1GasPrice(), + config.governance, + governanoceOperationSalt, + data, + Utils.MAX_PRIORITY_TX_GAS, + new bytes[](0), + L2_BRIDGEHUB_ADDRESS, + config.gatewayChainId, + config.bridgehub, + 
config.sharedBridgeProxy + ); + + saveOutput(l2TxHash); + } + + function governanceSetCTMAssetHandler(bytes32 governanoceOperationSalt) public { + initializeConfig(); + + bytes32 assetId = IBridgehub(config.bridgehub).ctmAssetIdFromAddress(config.chainTypeManagerProxy); + + // This should be equivalent to `config.chainTypeManagerProxy`, but we just double checking to ensure that + // bridgehub was initialized correctly + address ctmAddress = IBridgehub(config.bridgehub).ctmAssetIdToAddress(assetId); + require(ctmAddress == config.chainTypeManagerProxy, "CTM asset id does not match the expected CTM address"); + + bytes memory secondBridgeData = abi.encodePacked( + SET_ASSET_HANDLER_COUNTERPART_ENCODING_VERSION, + abi.encode(assetId, L2_BRIDGEHUB_ADDRESS) + ); + + bytes32 l2TxHash = Utils.runGovernanceL1L2TwoBridgesTransaction( + _getL1GasPrice(), + config.governance, + governanoceOperationSalt, + Utils.MAX_PRIORITY_TX_GAS, + config.gatewayChainId, + config.bridgehub, + config.sharedBridgeProxy, + config.sharedBridgeProxy, + 0, + secondBridgeData + ); + + saveOutput(l2TxHash); + } + + function registerAssetIdInBridgehub(address gatewayCTMAddress, bytes32 governanoceOperationSalt) public { + initializeConfig(); + + bytes memory secondBridgeData = abi.encodePacked( + bytes1(0x01), + abi.encode(config.chainTypeManagerProxy, gatewayCTMAddress) + ); + + bytes32 l2TxHash = Utils.runGovernanceL1L2TwoBridgesTransaction( + _getL1GasPrice(), + config.governance, + governanoceOperationSalt, + Utils.MAX_PRIORITY_TX_GAS, + config.gatewayChainId, + config.bridgehub, + config.sharedBridgeProxy, + config.ctmDeploymentTracker, + 0, + secondBridgeData + ); + + saveOutput(l2TxHash); + } + + /// @dev Calling this function requires private key to the admin of the chain + function migrateChainToGateway(address chainAdmin, address accessControlRestriction, uint256 chainId) public { + initializeConfig(); + + // TODO(EVM-746): Use L2-based chain admin contract + address l2ChainAdmin = 
AddressAliasHelper.applyL1ToL2Alias(chainAdmin); + + bytes32 chainAssetId = IBridgehub(config.bridgehub).ctmAssetIdFromChainId(chainId); + + uint256 currentSettlementLayer = IBridgehub(config.bridgehub).settlementLayer(chainId); + if (currentSettlementLayer == config.gatewayChainId) { + console.log("Chain already using gateway as its settlement layer"); + saveOutput(bytes32(0)); + return; + } + + bytes memory bridgehubData = abi.encode( + BridgehubBurnCTMAssetData({ + chainId: chainId, + ctmData: abi.encode(l2ChainAdmin, config.gatewayDiamondCutData), + chainData: abi.encode(IZKChain(IBridgehub(config.bridgehub).getZKChain(chainId)).getProtocolVersion()) + }) + ); + + // TODO: use constant for the 0x01 + bytes memory secondBridgeData = abi.encodePacked(bytes1(0x01), abi.encode(chainAssetId, bridgehubData)); + + bytes32 l2TxHash = Utils.runAdminL1L2TwoBridgesTransaction( + _getL1GasPrice(), + chainAdmin, + accessControlRestriction, + Utils.MAX_PRIORITY_TX_GAS, + config.gatewayChainId, + config.bridgehub, + config.sharedBridgeProxy, + config.sharedBridgeProxy, + 0, + secondBridgeData + ); + + saveOutput(l2TxHash); + } + + /// @dev Calling this function requires private key to the admin of the chain + function startMigrateChainFromGateway( + address chainAdmin, + address accessControlRestriction, + uint256 chainId + ) public { + initializeConfig(); + IBridgehub bridgehub = IBridgehub(config.bridgehub); + + uint256 currentSettlementLayer = bridgehub.settlementLayer(chainId); + if (currentSettlementLayer != config.gatewayChainId) { + console.log("Chain not using Gateway as settlement layer"); + saveOutput(bytes32(0)); + return; + } + + bytes memory bridgehubBurnData = abi.encode( + BridgehubBurnCTMAssetData({ + chainId: chainId, + ctmData: abi.encode(chainAdmin, config.l1DiamondCutData), + chainData: abi.encode(IChainTypeManager(config.chainTypeManagerProxy).getProtocolVersion(chainId)) + }) + ); + + bytes32 ctmAssetId = bridgehub.ctmAssetIdFromChainId(chainId); + 
L2AssetRouter l2AssetRouter = L2AssetRouter(L2_ASSET_ROUTER_ADDR); + bytes memory l2Calldata = abi.encodeCall(IL2AssetRouter.withdraw, (ctmAssetId, bridgehubBurnData)); + bytes32 l2TxHash = Utils.runAdminL1L2DirectTransaction( + _getL1GasPrice(), + chainAdmin, + accessControlRestriction, + l2Calldata, + Utils.MAX_PRIORITY_TX_GAS, + new bytes[](0), + L2_ASSET_ROUTER_ADDR, + config.gatewayChainId, + config.bridgehub, + config.sharedBridgeProxy + ); + + saveOutput(l2TxHash); + } + + function finishMigrateChainFromGateway( + uint256 migratingChainId, + uint256 gatewayChainId, + uint256 l2BatchNumber, + uint256 l2MessageIndex, + uint16 l2TxNumberInBatch, + bytes memory message, + bytes32[] memory merkleProof + ) public { + initializeConfig(); + + L1Nullifier l1Nullifier = L1Nullifier(config.l1NullifierProxy); + IBridgehub bridgehub = IBridgehub(config.bridgehub); + bytes32 assetId = bridgehub.ctmAssetIdFromChainId(migratingChainId); + vm.broadcast(); + l1Nullifier.finalizeDeposit( + FinalizeL1DepositParams({ + chainId: gatewayChainId, + l2BatchNumber: l2BatchNumber, + l2MessageIndex: l2MessageIndex, + l2Sender: L2_ASSET_ROUTER_ADDR, + l2TxNumberInBatch: l2TxNumberInBatch, + message: message, + merkleProof: merkleProof + }) + ); + } + + /// @dev Calling this function requires private key to the admin of the chain + function setDAValidatorPair( + address chainAdmin, + address accessControlRestriction, + uint256 chainId, + address l1DAValidator, + address l2DAValidator, + address chainDiamondProxyOnGateway + ) public { + initializeConfig(); + + bytes memory data = abi.encodeCall(IAdmin.setDAValidatorPair, (l1DAValidator, l2DAValidator)); + + bytes32 l2TxHash = Utils.runAdminL1L2DirectTransaction( + _getL1GasPrice(), + chainAdmin, + accessControlRestriction, + data, + Utils.MAX_PRIORITY_TX_GAS, + new bytes[](0), + chainDiamondProxyOnGateway, + config.gatewayChainId, + config.bridgehub, + config.sharedBridgeProxy + ); + + saveOutput(l2TxHash); + } + + function 
enableValidator( + address chainAdmin, + address accessControlRestriction, + uint256 chainId, + address validatorAddress, + address gatewayValidatorTimelock + ) public { + initializeConfig(); + + bytes memory data = abi.encodeCall(ValidatorTimelock.addValidator, (chainId, validatorAddress)); + + bytes32 l2TxHash = Utils.runAdminL1L2DirectTransaction( + _getL1GasPrice(), + chainAdmin, + accessControlRestriction, + data, + Utils.MAX_PRIORITY_TX_GAS, + new bytes[](0), + gatewayValidatorTimelock, + config.gatewayChainId, + config.bridgehub, + config.sharedBridgeProxy + ); + + saveOutput(l2TxHash); + } + + /// TODO(EVM-748): make that function support non-ETH based chains + function supplyGatewayWallet(address addr, uint256 amount) public { + initializeConfig(); + + Utils.runL1L2Transaction( + hex"", + Utils.MAX_PRIORITY_TX_GAS, + amount, + new bytes[](0), + addr, + config.gatewayChainId, + config.bridgehub, + config.sharedBridgeProxy + ); + + // We record L2 tx hash only for governance operations + saveOutput(bytes32(0)); + } + + /// The caller of this function should have private key of the admin of the *gateway* + function deployAndSetGatewayTransactionFilterer() public { + initializeConfig(); + + vm.broadcast(); + GatewayTransactionFilterer impl = new GatewayTransactionFilterer( + IBridgehub(config.bridgehub), + config.sharedBridgeProxy + ); + + vm.broadcast(); + TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy( + address(impl), + config.gatewayChainProxyAdmin, + abi.encodeCall(GatewayTransactionFilterer.initialize, (config.gatewayChainAdmin)) + ); + + GatewayTransactionFilterer proxyAsFilterer = GatewayTransactionFilterer(address(proxy)); + + IZKChain chain = IZKChain(IBridgehub(config.bridgehub).getZKChain(config.gatewayChainId)); + + // Firstly, we set the filterer + Utils.adminExecute({ + _admin: config.gatewayChainAdmin, + _accessControlRestriction: config.gatewayAccessControlRestriction, + _target: address(chain), + _data: 
abi.encodeCall(IAdmin.setTransactionFilterer, (address(proxyAsFilterer))), + _value: 0 + }); + + _grantWhitelist(address(proxy), config.gatewayChainAdmin); + _grantWhitelist(address(proxy), config.sharedBridgeProxy); + _grantWhitelist(address(proxy), config.ctmDeploymentTracker); + + // Then, we grant the whitelist to a few addresses + + saveOutput(address(impl), address(proxy)); + } + + function grantWhitelist(address filtererProxy, address[] memory addresses) public { + initializeConfig(); + + for (uint256 i = 0; i < addresses.length; i++) { + if (GatewayTransactionFilterer(filtererProxy).whitelistedSenders(addresses[i])) { + console.log("Address already whitelisted: ", addresses[i]); + } else { + _grantWhitelist(filtererProxy, addresses[i]); + } + } + } + + function _grantWhitelist(address filtererProxy, address addr) internal { + Utils.adminExecute({ + _admin: config.gatewayChainAdmin, + _accessControlRestriction: config.gatewayAccessControlRestriction, + _target: address(filtererProxy), + _data: abi.encodeCall(GatewayTransactionFilterer.grantWhitelist, (addr)), + _value: 0 + }); + } +} diff --git a/l1-contracts/deploy-scripts/InitializeL2WethToken.s.sol b/l1-contracts/deploy-scripts/InitializeL2WethToken.s.sol index 22c427a13..c9b1de8c6 100644 --- a/l1-contracts/deploy-scripts/InitializeL2WethToken.s.sol +++ b/l1-contracts/deploy-scripts/InitializeL2WethToken.s.sol @@ -1,11 +1,11 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; +pragma solidity ^0.8.21; // solhint-disable no-console import {Script, console2 as console} from "forge-std/Script.sol"; import {stdToml} from "forge-std/StdToml.sol"; -import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; import {Utils} from "./Utils.sol"; import {L2TransactionRequestDirect} from "contracts/bridgehub/IBridgehub.sol"; @@ 
-14,6 +14,7 @@ import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; contract InitializeL2WethTokenScript is Script { using stdToml for string; + // solhint-disable-next-line gas-struct-packing struct Config { address deployerAddress; address create2FactoryAddr; @@ -31,7 +32,7 @@ contract InitializeL2WethTokenScript is Script { uint256 gasMultiplier; } - Config config; + Config internal config; function run() public { initializeConfig(); diff --git a/l1-contracts/deploy-scripts/L2ContractsBytecodesLib.sol b/l1-contracts/deploy-scripts/L2ContractsBytecodesLib.sol new file mode 100644 index 000000000..9b672deb2 --- /dev/null +++ b/l1-contracts/deploy-scripts/L2ContractsBytecodesLib.sol @@ -0,0 +1,305 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +import "./Utils.sol"; + +/// @title L2ContractsBytecodesLib +/// @notice Library providing functions to read bytecodes of L2 contracts individually. +library L2ContractsBytecodesLib { + /// @notice Reads the bytecode of the Bridgehub contract. + /// @return The bytecode of the Bridgehub contract. + function readBridgehubBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode("/../l1-contracts/artifacts-zk/contracts/bridgehub/Bridgehub.sol/Bridgehub.json"); + } + + /// @notice Reads the bytecode of the L2NativeTokenVault contract. + /// @return The bytecode of the L2NativeTokenVault contract. + function readL2NativeTokenVaultBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/bridge/ntv/L2NativeTokenVault.sol/L2NativeTokenVault.json" + ); + } + + /// @notice Reads the bytecode of the L2AssetRouter contract. + /// @return The bytecode of the L2AssetRouter contract. 
+ function readL2AssetRouterBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/bridge/asset-router/L2AssetRouter.sol/L2AssetRouter.json" + ); + } + + /// @notice Reads the bytecode of the MessageRoot contract. + /// @return The bytecode of the MessageRoot contract. + function readMessageRootBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/bridgehub/MessageRoot.sol/MessageRoot.json" + ); + } + + /// @notice Reads the bytecode of the UpgradeableBeacon contract. + /// @return The bytecode of the UpgradeableBeacon contract. + function readUpgradeableBeaconBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol/UpgradeableBeacon.json" + ); + } + + /// @notice Reads the bytecode of the BeaconProxy contract. + /// @return The bytecode of the BeaconProxy contract. + function readBeaconProxyBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol/BeaconProxy.json" + ); + } + + /// @notice Reads the bytecode of the BridgedStandardERC20 contract. + /// @return The bytecode of the BridgedStandardERC20 contract. + function readStandardERC20Bytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/bridge/BridgedStandardERC20.sol/BridgedStandardERC20.json" + ); + } + + /// @notice Reads the bytecode of the TransparentUpgradeableProxy contract. + /// @return The bytecode of the TransparentUpgradeableProxy contract. 
+ function readTransparentUpgradeableProxyBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol/TransparentUpgradeableProxy.json" + ); + } + + /// @notice Reads the bytecode of the ForceDeployUpgrader contract. + /// @return The bytecode of the ForceDeployUpgrader contract. + function readForceDeployUpgraderBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l2-contracts/artifacts-zk/contracts/ForceDeployUpgrader.sol/ForceDeployUpgrader.json" + ); + } + + /// @notice Reads the bytecode of the RollupL2DAValidator contract. + /// @return The bytecode of the RollupL2DAValidator contract. + function readRollupL2DAValidatorBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l2-contracts/artifacts-zk/contracts/data-availability/RollupL2DAValidator.sol/RollupL2DAValidator.json" + ); + } + + /// @notice Reads the bytecode of the ValidiumL2DAValidator contract. + /// @return The bytecode of the ValidiumL2DAValidator contract. + function readValidiumL2DAValidatorBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l2-contracts/artifacts-zk/contracts/data-availability/ValidiumL2DAValidator.sol/ValidiumL2DAValidator.json" + ); + } + + /// @notice Reads the bytecode of the ChainTypeManager contract. + /// @return The bytecode of the ChainTypeManager contract. + function readChainTypeManagerBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/ChainTypeManager.sol/ChainTypeManager.json" + ); + } + + /// @notice Reads the bytecode of the AdminFacet contract. + /// @return The bytecode of the AdminFacet contract. 
+ function readAdminFacetBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/chain-deps/facets/Admin.sol/AdminFacet.json" + ); + } + + /// @notice Reads the bytecode of the MailboxFacet contract. + /// @return The bytecode of the MailboxFacet contract. + function readMailboxFacetBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/chain-deps/facets/Mailbox.sol/MailboxFacet.json" + ); + } + + /// @notice Reads the bytecode of the ExecutorFacet contract. + /// @return The bytecode of the ExecutorFacet contract. + function readExecutorFacetBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/chain-deps/facets/Executor.sol/ExecutorFacet.json" + ); + } + + /// @notice Reads the bytecode of the GettersFacet contract. + /// @return The bytecode of the GettersFacet contract. + function readGettersFacetBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/chain-deps/facets/Getters.sol/GettersFacet.json" + ); + } + + /// @notice Reads the bytecode of the Verifier contract. + /// @return The bytecode of the Verifier contract. + function readVerifierBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/Verifier.sol/Verifier.json" + ); + } + + /// @notice Reads the bytecode of the L2 Verifier contract. + /// @return The bytecode of the Verifier contract. + function readL2VerifierBytecode() internal view returns (bytes memory) { + return Utils.readHardhatBytecode("/../l2-contracts/artifacts-zk/contracts/verifier/Verifier.sol/Verifier.json"); + } + + /// @notice Reads the bytecode of the Verifier contract. 
+ /// @return The bytecode of the Verifier contract. + function readConsensusRegistryBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l2-contracts/artifacts-zk/contracts/ConsensusRegistry.sol/ConsensusRegistry.json" + ); + } + + /// @notice Reads the bytecode of the TestnetVerifier contract. + /// @return The bytecode of the TestnetVerifier contract. + function readL2TestnetVerifierBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l2-contracts/artifacts-zk/contracts/verifier/TestnetVerifier.sol/TestnetVerifier.json" + ); + } + + /// @notice Reads the bytecode of the ValidatorTimelock contract. + /// @return The bytecode of the ValidatorTimelock contract. + function readValidatorTimelockBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/ValidatorTimelock.sol/ValidatorTimelock.json" + ); + } + + /// @notice Reads the bytecode of the DiamondInit contract. + /// @return The bytecode of the DiamondInit contract. + function readDiamondInitBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/chain-deps/DiamondInit.sol/DiamondInit.json" + ); + } + + /// @notice Reads the bytecode of the DiamondProxy contract. + /// @return The bytecode of the DiamondProxy contract. + function readDiamondProxyBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/chain-deps/DiamondProxy.sol/DiamondProxy.json" + ); + } + + /// @notice Reads the bytecode of the L1GenesisUpgrade contract. + /// @return The bytecode of the L1GenesisUpgrade contract. 
+ function readL1GenesisUpgradeBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/upgrades/L1GenesisUpgrade.sol/L1GenesisUpgrade.json" + ); + } + + /// @notice Reads the bytecode of the DefaultUpgrade contract. + /// @return The bytecode of the DefaultUpgrade contract. + function readDefaultUpgradeBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/upgrades/DefaultUpgrade.sol/DefaultUpgrade.json" + ); + } + + /// @notice Reads the bytecode of the Multicall3 contract. + /// @return The bytecode of the Multicall3 contract. + function readMulticall3Bytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/dev-contracts/Multicall3.sol/Multicall3.json" + ); + } + + /// @notice Reads the bytecode of the RelayedSLDAValidator contract. + /// @return The bytecode of the RelayedSLDAValidator contract. + function readRelayedSLDAValidatorBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/data-availability/RelayedSLDAValidator.sol/RelayedSLDAValidator.json" + ); + } + + function readValidiumL1DAValidatorBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/state-transition/data-availability/ValidiumL1DAValidator.sol/ValidiumL1DAValidator.json" + ); + } + + /// @notice Reads the bytecode of the L2SharedBridgeLegacy contract. + /// @return The bytecode of the L2SharedBridgeLegacy contract. 
+ function readL2LegacySharedBridgeBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/bridge/L2SharedBridgeLegacy.sol/L2SharedBridgeLegacy.json" + ); + } + + /// @notice Reads the bytecode of the L2GatewayUpgrade contract. + /// @return The bytecode of the L2GatewayUpgrade contract. + function readGatewayUpgradeBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../system-contracts/artifacts-zk/contracts-preprocessed/L2GatewayUpgrade.sol/L2GatewayUpgrade.json" + ); + } + + /// @notice Reads the bytecode of the L2GatewayUpgrade contract. + /// @return The bytecode of the L2GatewayUpgrade contract. + function readL2AdminFactoryBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/governance/L2AdminFactory.sol/L2AdminFactory.json" + ); + } + + function readProxyAdminBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol/ProxyAdmin.json" + ); + } + + /// @notice Reads the bytecode of the L2GatewayUpgrade contract. + /// @return The bytecode of the L2GatewayUpgrade contract. + function readPermanentRestrictionBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/governance/PermanentRestriction.sol/PermanentRestriction.json" + ); + } + + /// @notice Reads the bytecode of the L2ProxyAdminDeployer contract. + /// @return The bytecode of the L2ProxyAdminDeployer contract. 
+ function readProxyAdminDeployerBytecode() internal view returns (bytes memory) { + return + Utils.readHardhatBytecode( + "/../l1-contracts/artifacts-zk/contracts/governance/L2ProxyAdminDeployer.sol/L2ProxyAdminDeployer.json" + ); + } +} diff --git a/l1-contracts/deploy-scripts/PrepareZKChainRegistrationCalldata.s.sol b/l1-contracts/deploy-scripts/PrepareZKChainRegistrationCalldata.s.sol new file mode 100644 index 000000000..618ee3c64 --- /dev/null +++ b/l1-contracts/deploy-scripts/PrepareZKChainRegistrationCalldata.s.sol @@ -0,0 +1,369 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-struct-packing, gas-custom-errors + +import {Script, console2 as console} from "forge-std/Script.sol"; +import {stdToml} from "forge-std/StdToml.sol"; + +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {L1AssetRouter} from "contracts/bridge/L1AssetRouter.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IGovernance} from "contracts/governance/IGovernance.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {Utils} from "./Utils.sol"; + +/** + * @title Prepare ZKChain Registration Calldata + * @dev Script to prepare calldata for registering a new ZKChain on the bridgehub + * + * This script prepares calldata for registering a new ZKChain on the bridgehub + * That includes 3 steps: + * 1. Register base token on the bridgehub + * 2. Register the new ZKChain on the bridgehub + * 3. Initialize the L2 bridge on the L1 shared bridge + * + * The script precomputes the address of the L2 bridge to generate the calldata for the third step. 
+ * It assumes that L1 governance is the owner of the L2 bridge and the L2 bridge is deployed by the msg.sender of the script. + * + * The script reads the configuration from a TOML file `script-config/prepare-registration-calldata.toml` + * and writes the output to a TOML file `script-out/output-prepare-registration-calldata.toml`. + * + * The output contains 4 fields: + * - scheduleCalldataStageOne: calldata for scheduling the first stage + * - executeCalldataStageOne: calldata for executing the first stage + * - scheduleCalldataStageTwo: calldata for scheduling the second stage + * - executeCalldataStageTwo: calldata for executing the second stage + * (stage 2 of the execution is the registration of the L2 bridge on the L1 shared bridge) + * + * The separation is required to ensure that there is no period of time where the L2 bridge is registered, so users + * can send their funds there, but they will be burned in case L2 bridge is not initialized by the chain operator. + * It is meant to be executed only after the L2 bridge is deployed. + * + * How to use: + * 1. Create a TOML file `script-config/prepare-registration-calldata.toml`, reference config at `deploy-script-config-template/config-prepare-registration-calldata.toml`. + * 2. Run the script impersonating the address that will deploy the L2 bridge, `forge script --rpc-url $RPC --sender $DEPLOYER PrepareZKChainRegistrationCalldata.s.sol` + * 3. Run the `scheduleCalldataStageOne` and `executeCalldataStageOne` on the L1 chain using governance. + * 4. Deploy the L2 bridge using the address from step 2. This address doesn't need any special permissions, just has to be consistent across all the stages. + * 5. Run the `scheduleCalldataStageTwo` and `executeCalldataStageTwo` on the L1 chain using governance. 
+ * + */ +contract PrepareZKChainRegistrationCalldataScript is Script { + using stdToml for string; + + address internal constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; + + struct Config { + // Admin of the yet-to-be-registered chain (L1-based address) + address chainAdmin; + // CTM proxy address + address stateTransitionProxy; + // Chain ID of the new chain + uint256 chainId; + // Chain ID of the era (required for the L2 bridge deployment) + uint256 eraChainId; + // Salt for createNewChain call, also used as a governance operation salt + uint256 bridgehubCreateNewChainSalt; + // Address of the new chain's base token + address baseToken; + // Diamond cut data is a "configuration" for the Diamond proxy that will be created for a new chain. + // It can only be the one that's allowed by the CTM. It can be generated by the other scripts or taken from the + // `etc/env/ecosystems/ENV.yaml` file in `zksync-era` repository + bytes diamondCutData; + // Address of the L1 ERC20 bridge proxy (required for the L2 bridge deployment) + address erc20BridgeProxy; + } + + // Addresses of the contracts in the L1 ecosystem that are fetched from the chain + struct EcosystemContracts { + // Address of the L1 shared bridge proxy + address l1SharedBridgeProxy; + // Bridgehub proxy address + address bridgehub; + // Address of the governance contract for the L1 ecosystem + address governance; + } + + struct ContractsBytecodes { + // Default bytecode of the ERC-20 on L2 (BeaconProxy) + bytes beaconProxy; + // Bytecode of the L2 shared bridge + bytes l2SharedBridgeBytecode; + // Bytecode of the L2 shared bridge proxy (TransparentUpgradeableProxy) + bytes l2SharedBridgeProxyBytecode; + } + + Config internal config; + EcosystemContracts internal ecosystem; + ContractsBytecodes internal bytecodes; + + function run() public { + console.log("Preparing ZK chain registration calldata"); + + initializeConfig(); + + checkBaseTokenAddress(); + + IGovernance.Call[] memory calls; + 
uint256 cnt = 0; + if (!IBridgehub(ecosystem.bridgehub).tokenIsRegistered(config.baseToken)) { + calls = new IGovernance.Call[](2); + console.log("Adding a call to register base token on the bridgehub"); + IGovernance.Call memory baseTokenRegistrationCall = prepareRegisterBaseTokenCall(); + calls[cnt] = baseTokenRegistrationCall; + ++cnt; + } else { + calls = new IGovernance.Call[](1); + } + + IGovernance.Call memory registerChainCall = prepareRegisterZKChainCall(); + calls[cnt] = registerChainCall; + ++cnt; + + address l2SharedBridgeProxy = computeL2BridgeAddress(); + IGovernance.Call memory initChainCall = prepareInitializeChainGovernanceCall(l2SharedBridgeProxy); + + scheduleTransparentCalldata(calls, initChainCall); + } + + function initializeConfig() internal { + // Grab config from output of l1 deployment + string memory root = vm.projectRoot(); + string memory path = string.concat(root, "/script-config/prepare-registration-calldata.toml"); + console.log("Reading a config from", path); + string memory toml = vm.readFile(path); + + // Config file must be parsed key by key, otherwise values returned + // are parsed alphabetically and not by key. 
+ // https://book.getfoundry.sh/cheatcodes/parse-toml + config.stateTransitionProxy = toml.readAddress("$.deployed_addresses.state_transition_proxy_addr"); + config.erc20BridgeProxy = toml.readAddress("$.deployed_addresses.erc20_bridge_proxy_addr"); + + ecosystem.bridgehub = IChainTypeManager(config.stateTransitionProxy).BRIDGE_HUB(); + ecosystem.l1SharedBridgeProxy = address(Bridgehub(ecosystem.bridgehub).sharedBridge()); + ecosystem.governance = Bridgehub(ecosystem.bridgehub).owner(); + + config.chainId = toml.readUint("$.chain.chain_id"); + config.eraChainId = toml.readUint("$.chain.era_chain_id"); + config.chainAdmin = toml.readAddress("$.chain.admin"); + config.diamondCutData = toml.readBytes("$.chain.diamond_cut_data"); + config.bridgehubCreateNewChainSalt = toml.readUint("$.chain.bridgehub_create_new_chain_salt"); + config.baseToken = toml.readAddress("$.chain.base_token_addr"); + + bytecodes.l2SharedBridgeBytecode = Utils.readHardhatBytecode("/script-config/artifacts/L2SharedBridge.json"); + bytecodes.beaconProxy = Utils.readHardhatBytecode("/script-config/artifacts/BeaconProxy.json"); + bytecodes.l2SharedBridgeProxyBytecode = Utils.readHardhatBytecode( + "/script-config/artifacts/TransparentUpgradeableProxy.json" + ); + } + + function checkBaseTokenAddress() internal view { + if (config.baseToken == address(0)) { + revert("Base token address is not set"); + } + + // Check if it's ethereum address + if (config.baseToken == ADDRESS_ONE) { + return; + } + + // Does not prevent registering a non-ERC-20 contract as a token + // But calling the ERC-20 methods here on non-ERC-20 will fail without a readable revert message + if (config.baseToken.code.length == 0) { + revert("Token address is an EOA"); + } + + console.log("Using base token address:", config.baseToken); + } + + function prepareRegisterBaseTokenCall() internal view returns (IGovernance.Call memory) { + Bridgehub bridgehub = Bridgehub(ecosystem.bridgehub); + + bytes memory data = abi.encodeCall( + 
bridgehub.addTokenAssetId, + (DataEncoding.encodeNTVAssetId(block.chainid, config.baseToken)) + ); + + return IGovernance.Call({target: ecosystem.bridgehub, value: 0, data: data}); + } + + // @dev Computes the address of the L2 bridge and the L2 bridge proxy + // The variables that are used to compute the address are: + // - Salt + // - L2 bridge impl bytecode hash + // - Era chain ID + // - Beacon proxy bytecode hash + // - L1 governance address(owner of the L2 bridge) + // - L1 ERC20 bridge proxy address + // - Default ERC20 proxy address + // - Deployer's address + function computeL2BridgeAddress() internal view returns (address) { + bytes32 salt = ""; + bytes32 bridgeBytecodeHash = L2ContractHelper.hashL2Bytecode(bytecodes.l2SharedBridgeBytecode); + bytes memory bridgeConstructorData = abi.encode(config.eraChainId); + + address deployer; + address l2GovernanceAddress; + + // ZKsync's protocol design assumes that the addresses of all the smart contracts that are sending L1->L2 + // messages are aliased. We have to check if the sender is an EOA and apply the alias if it is not. + if (isEOA(msg.sender)) { + deployer = msg.sender; + } else { + deployer = AddressAliasHelper.applyL1ToL2Alias(msg.sender); + } + + // If the governance address is an EOA, we use it directly, otherwise we apply the alias. On the Mainnet/Testnet + // the governance address is a smart contract, but in case someone uses the script with different envs, we have + // to check if the address is an EOA. 
+ if (isEOA(ecosystem.governance)) { + l2GovernanceAddress = ecosystem.governance; + } else { + l2GovernanceAddress = AddressAliasHelper.applyL1ToL2Alias(ecosystem.governance); + } + + address implContractAddress = L2ContractHelper.computeCreate2Address( + deployer, + salt, + bridgeBytecodeHash, + keccak256(bridgeConstructorData) + ); + + console.log("Computed L2 bridge impl address:", implContractAddress); + console.log("Bridge bytecode hash:"); + console.logBytes32(bridgeBytecodeHash); + console.log("Bridge constructor data:"); + console.logBytes(bridgeConstructorData); + console.log("Deployer:", deployer); + + bytes32 l2StandardErc20BytecodeHash = L2ContractHelper.hashL2Bytecode(bytecodes.beaconProxy); + + // solhint-disable-next-line func-named-parameters + bytes memory proxyInitializationParams = abi.encodeWithSignature( + "initialize(address,address,bytes32,address)", + ecosystem.l1SharedBridgeProxy, + config.erc20BridgeProxy, + l2StandardErc20BytecodeHash, + l2GovernanceAddress + ); + + bytes memory l2SharedBridgeProxyConstructorData = abi.encode( + implContractAddress, + l2GovernanceAddress, + proxyInitializationParams + ); + + address proxyContractAddress = L2ContractHelper.computeCreate2Address( + deployer, + salt, + L2ContractHelper.hashL2Bytecode(bytecodes.l2SharedBridgeProxyBytecode), + keccak256(l2SharedBridgeProxyConstructorData) + ); + + console.log("Computed L2 bridge proxy address:", proxyContractAddress); + console.log("L1 shared bridge proxy:", ecosystem.l1SharedBridgeProxy); + console.log("L1 ERC20 bridge proxy:", config.erc20BridgeProxy); + console.log("L2 governor addr:", l2GovernanceAddress); + + return proxyContractAddress; + } + + function prepareRegisterZKChainCall() internal view returns (IGovernance.Call memory) { + Bridgehub bridgehub = Bridgehub(ecosystem.bridgehub); + + bytes memory data = abi.encodeCall( + bridgehub.createNewChain, + ( + config.chainId, + config.stateTransitionProxy, + config.baseToken, + 
config.bridgehubCreateNewChainSalt, + config.chainAdmin, + config.diamondCutData + ) + ); + + return IGovernance.Call({target: ecosystem.bridgehub, value: 0, data: data}); + } + + function prepareInitializeChainGovernanceCall( + address l2SharedBridgeProxy + ) internal view returns (IGovernance.Call memory) { + L1AssetRouter bridge = L1AssetRouter(ecosystem.l1SharedBridgeProxy); + + bytes memory data = abi.encodeCall(bridge.initializeChainGovernance, (config.chainId, l2SharedBridgeProxy)); + + return IGovernance.Call({target: ecosystem.l1SharedBridgeProxy, value: 0, data: data}); + } + + // @dev Prepares a call to schedule a transparent operation on the governance contract + // `calls` is an array of calls that will be executed in the first stage (add a token to BH, create a new chain) + // `initChainGovCall` is a call that will be executed in the second stage (register the L2 bridge on the L1 shared bridge) + function scheduleTransparentCalldata( + IGovernance.Call[] memory calls, + IGovernance.Call memory initChainGovCall + ) internal { + IGovernance governance = IGovernance(ecosystem.governance); + + IGovernance.Operation memory operation = IGovernance.Operation({ + calls: calls, + predecessor: bytes32(0), + salt: bytes32(config.bridgehubCreateNewChainSalt) + }); + + bytes memory scheduleCalldata = abi.encodeCall(governance.scheduleTransparent, (operation, 0)); + bytes memory executeCalldata = abi.encodeCall(governance.execute, (operation)); + + IGovernance.Call[] memory initChainGovArray = new IGovernance.Call[](1); + initChainGovArray[0] = initChainGovCall; + + IGovernance.Operation memory operation2 = IGovernance.Operation({ + calls: initChainGovArray, + predecessor: bytes32(0), + salt: bytes32(config.bridgehubCreateNewChainSalt) + }); + + bytes memory scheduleCalldata2 = abi.encodeCall(governance.scheduleTransparent, (operation2, 0)); + bytes memory executeCalldata2 = abi.encodeCall(governance.execute, (operation2)); + + saveOutput(scheduleCalldata, 
executeCalldata, scheduleCalldata2, executeCalldata2); + } + + // Writes the output to a TOML file `script-out/output-prepare-registration-calldata.toml + // For the detailed explanation of the output - look into the contract description + function saveOutput( + bytes memory schedule, + bytes memory execute, + bytes memory schedule2, + bytes memory execute2 + ) internal { + vm.serializeBytes("root", "scheduleCalldataStageOne", schedule); + vm.serializeBytes("root", "executeCalldataStageOne", execute); + vm.serializeBytes("root", "scheduleCalldataStageTwo", schedule2); + string memory toml = vm.serializeBytes("root", "executeCalldataStageTwo", execute2); + + string memory root = vm.projectRoot(); + string memory path = string.concat(root, "/script-out/output-prepare-registration-calldata.toml"); + + console.log("Writing the output to", path); + vm.writeToml(toml, path); + } + + function isEOA(address _addr) private view returns (bool) { + uint32 size; + assembly { + size := extcodesize(_addr) + } + + return (size == 0); + } +} + +// Done by the chain admin separately from this script: +// - add validators +// - deploy L2 contracts +// - set pubdata sending mode +// - set base token gas price multiplier diff --git a/l1-contracts/deploy-scripts/RegisterHyperchain.s.sol b/l1-contracts/deploy-scripts/RegisterHyperchain.s.sol deleted file mode 100644 index f5e23cf8d..000000000 --- a/l1-contracts/deploy-scripts/RegisterHyperchain.s.sol +++ /dev/null @@ -1,233 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -// solhint-disable no-console - -import {Script, console2 as console} from "forge-std/Script.sol"; -import {Vm} from "forge-std/Vm.sol"; -import {stdToml} from "forge-std/StdToml.sol"; - -import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; -import {IZkSyncHyperchain} from "contracts/state-transition/chain-interfaces/IZkSyncHyperchain.sol"; -import {ValidatorTimelock} from "contracts/state-transition/ValidatorTimelock.sol"; -import 
{Governance} from "contracts/governance/Governance.sol"; -import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; -import {Utils} from "./Utils.sol"; -import {PubdataPricingMode} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; - -contract RegisterHyperchainScript is Script { - using stdToml for string; - - address constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; - bytes32 constant STATE_TRANSITION_NEW_CHAIN_HASH = keccak256("NewHyperchain(uint256,address)"); - - struct Config { - address deployerAddress; - address ownerAddress; - uint256 chainChainId; - bool validiumMode; - uint256 bridgehubCreateNewChainSalt; - address validatorSenderOperatorCommitEth; - address validatorSenderOperatorBlobsEth; - address baseToken; - uint128 baseTokenGasPriceMultiplierNominator; - uint128 baseTokenGasPriceMultiplierDenominator; - address bridgehub; - address stateTransitionProxy; - address validatorTimelock; - bytes diamondCutData; - address governanceSecurityCouncilAddress; - uint256 governanceMinDelay; - address newDiamondProxy; - address governance; - address chainAdmin; - } - - Config config; - - function run() public { - console.log("Deploying Hyperchain"); - - initializeConfig(); - - deployGovernance(); - deployChainAdmin(); - checkTokenAddress(); - registerTokenOnBridgehub(); - registerHyperchain(); - addValidators(); - configureZkSyncStateTransition(); - setPendingAdmin(); - - saveOutput(); - } - - function initializeConfig() internal { - // Grab config from output of l1 deployment - string memory root = vm.projectRoot(); - string memory path = string.concat(root, "/script-config/register-hyperchain.toml"); - string memory toml = vm.readFile(path); - - config.deployerAddress = msg.sender; - - // Config file must be parsed key by key, otherwise values returned - // are parsed alfabetically and not by key. 
- // https://book.getfoundry.sh/cheatcodes/parse-toml - config.ownerAddress = toml.readAddress("$.owner_address"); - - config.bridgehub = toml.readAddress("$.deployed_addresses.bridgehub.bridgehub_proxy_addr"); - config.stateTransitionProxy = toml.readAddress( - "$.deployed_addresses.state_transition.state_transition_proxy_addr" - ); - config.validatorTimelock = toml.readAddress("$.deployed_addresses.validator_timelock_addr"); - - config.diamondCutData = toml.readBytes("$.contracts_config.diamond_cut_data"); - - config.chainChainId = toml.readUint("$.chain.chain_chain_id"); - config.bridgehubCreateNewChainSalt = toml.readUint("$.chain.bridgehub_create_new_chain_salt"); - config.baseToken = toml.readAddress("$.chain.base_token_addr"); - config.validiumMode = toml.readBool("$.chain.validium_mode"); - config.validatorSenderOperatorCommitEth = toml.readAddress("$.chain.validator_sender_operator_commit_eth"); - config.validatorSenderOperatorBlobsEth = toml.readAddress("$.chain.validator_sender_operator_blobs_eth"); - config.baseTokenGasPriceMultiplierNominator = uint128( - toml.readUint("$.chain.base_token_gas_price_multiplier_nominator") - ); - config.baseTokenGasPriceMultiplierDenominator = uint128( - toml.readUint("$.chain.base_token_gas_price_multiplier_denominator") - ); - config.governanceMinDelay = uint256(toml.readUint("$.chain.governance_min_delay")); - config.governanceSecurityCouncilAddress = toml.readAddress("$.chain.governance_security_council_address"); - } - - function checkTokenAddress() internal view { - if (config.baseToken == address(0)) { - revert("Token address is not set"); - } - - // Check if it's ethereum address - if (config.baseToken == ADDRESS_ONE) { - return; - } - - if (config.baseToken.code.length == 0) { - revert("Token address is not a contract address"); - } - - console.log("Using base token address:", config.baseToken); - } - - function registerTokenOnBridgehub() internal { - Bridgehub bridgehub = Bridgehub(config.bridgehub); - - if 
(bridgehub.tokenIsRegistered(config.baseToken)) { - console.log("Token already registered on Bridgehub"); - } else { - bytes memory data = abi.encodeCall(bridgehub.addToken, (config.baseToken)); - Utils.chainAdminMulticall({ - _chainAdmin: bridgehub.admin(), - _target: config.bridgehub, - _data: data, - _value: 0 - }); - console.log("Token registered on Bridgehub"); - } - } - - function deployGovernance() internal { - vm.broadcast(); - Governance governance = new Governance( - config.ownerAddress, - config.governanceSecurityCouncilAddress, - config.governanceMinDelay - ); - console.log("Governance deployed at:", address(governance)); - config.governance = address(governance); - } - - function deployChainAdmin() internal { - vm.broadcast(); - ChainAdmin chainAdmin = new ChainAdmin(config.ownerAddress, address(0)); - console.log("ChainAdmin deployed at:", address(chainAdmin)); - config.chainAdmin = address(chainAdmin); - } - - function registerHyperchain() internal { - Bridgehub bridgehub = Bridgehub(config.bridgehub); - - vm.recordLogs(); - bytes memory data = abi.encodeCall( - bridgehub.createNewChain, - ( - config.chainChainId, - config.stateTransitionProxy, - config.baseToken, - config.bridgehubCreateNewChainSalt, - msg.sender, - config.diamondCutData - ) - ); - - Utils.chainAdminMulticall({_chainAdmin: bridgehub.admin(), _target: config.bridgehub, _data: data, _value: 0}); - console.log("Hyperchain registered"); - - // Get new diamond proxy address from emitted events - Vm.Log[] memory logs = vm.getRecordedLogs(); - address diamondProxyAddress; - for (uint256 i = 0; i < logs.length; i++) { - if (logs[i].topics[0] == STATE_TRANSITION_NEW_CHAIN_HASH) { - diamondProxyAddress = address(uint160(uint256(logs[i].topics[2]))); - break; - } - } - if (diamondProxyAddress == address(0)) { - revert("Diamond proxy address not found"); - } - config.newDiamondProxy = diamondProxyAddress; - console.log("Hyperchain diamond proxy deployed at:", diamondProxyAddress); - } - - 
function addValidators() internal { - ValidatorTimelock validatorTimelock = ValidatorTimelock(config.validatorTimelock); - - vm.startBroadcast(); - validatorTimelock.addValidator(config.chainChainId, config.validatorSenderOperatorCommitEth); - validatorTimelock.addValidator(config.chainChainId, config.validatorSenderOperatorBlobsEth); - vm.stopBroadcast(); - - console.log("Validators added"); - } - - function configureZkSyncStateTransition() internal { - IZkSyncHyperchain hyperchain = IZkSyncHyperchain(config.newDiamondProxy); - - vm.startBroadcast(); - hyperchain.setTokenMultiplier( - config.baseTokenGasPriceMultiplierNominator, - config.baseTokenGasPriceMultiplierDenominator - ); - - if (config.validiumMode) { - hyperchain.setPubdataPricingMode(PubdataPricingMode.Validium); - } - - vm.stopBroadcast(); - console.log("ZkSync State Transition configured"); - } - - function setPendingAdmin() internal { - IZkSyncHyperchain hyperchain = IZkSyncHyperchain(config.newDiamondProxy); - - vm.broadcast(); - hyperchain.setPendingAdmin(config.chainAdmin); - console.log("Owner for ", config.newDiamondProxy, "set to", config.chainAdmin); - } - - function saveOutput() internal { - vm.serializeAddress("root", "diamond_proxy_addr", config.newDiamondProxy); - vm.serializeAddress("root", "chain_admin_addr", config.chainAdmin); - string memory toml = vm.serializeAddress("root", "governance_addr", config.governance); - string memory root = vm.projectRoot(); - string memory path = string.concat(root, "/script-out/output-register-hyperchain.toml"); - vm.writeToml(toml, path); - } -} diff --git a/l1-contracts/deploy-scripts/RegisterZKChain.s.sol b/l1-contracts/deploy-scripts/RegisterZKChain.s.sol new file mode 100644 index 000000000..b1c8ae292 --- /dev/null +++ b/l1-contracts/deploy-scripts/RegisterZKChain.s.sol @@ -0,0 +1,491 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-custom-errors, reason-string + +import {Script, console2 as 
console} from "forge-std/Script.sol"; +import {Vm} from "forge-std/Vm.sol"; +import {stdToml} from "forge-std/StdToml.sol"; + +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; +import {Ownable} from "@openzeppelin/contracts-v4/access/Ownable.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {ValidatorTimelock} from "contracts/state-transition/ValidatorTimelock.sol"; +import {Governance} from "contracts/governance/Governance.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {Utils} from "./Utils.sol"; +import {L2ContractsBytecodesLib} from "./L2ContractsBytecodesLib.sol"; +import {PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {L1NullifierDev} from "contracts/dev-contracts/L1NullifierDev.sol"; +import {L2SharedBridgeLegacy} from "contracts/bridge/L2SharedBridgeLegacy.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; + +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; + +// solhint-disable-next-line gas-struct-packing +struct Config { + address deployerAddress; + address ownerAddress; + uint256 chainChainId; + bool validiumMode; + uint256 bridgehubCreateNewChainSalt; + address validatorSenderOperatorCommitEth; + address validatorSenderOperatorBlobsEth; + address baseToken; + bytes32 baseTokenAssetId; + uint128 baseTokenGasPriceMultiplierNominator; + uint128 
baseTokenGasPriceMultiplierDenominator; + address bridgehub; + // TODO(EVM-744): maybe rename to asset router + address sharedBridgeProxy; + address nativeTokenVault; + address chainTypeManagerProxy; + address validatorTimelock; + bytes diamondCutData; + bytes forceDeployments; + address governanceSecurityCouncilAddress; + uint256 governanceMinDelay; + address l1Nullifier; +} + +contract RegisterZKChainScript is Script { + using stdToml for string; + + address internal constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; + bytes32 internal constant STATE_TRANSITION_NEW_CHAIN_HASH = keccak256("NewZKChain(uint256,address)"); + + struct Output { + address governance; + address diamondProxy; + address chainAdmin; + address l2LegacySharedBridge; + address accessControlRestrictionAddress; + address chainProxyAdmin; + } + + struct LegacySharedBridgeParams { + bytes implementationConstructorParams; + address implementationAddress; + bytes proxyConstructorParams; + address proxyAddress; + } + + LegacySharedBridgeParams internal legacySharedBridgeParams; + + Config internal config; + Output internal output; + + function run() public { + console.log("Deploying ZKChain"); + + initializeConfig(); + // TODO: some chains may not want to have a legacy shared bridge + runInner("/script-out/output-register-zk-chain.toml", true); + } + + function runForTest() public { + console.log("Deploying ZKChain"); + + initializeConfigTest(); + // TODO: Yes, it is the same as for prod since it is never read from down the line + runInner(vm.envString("ZK_CHAIN_OUT"), false); + } + + function runInner(string memory outputPath, bool initializeL2LegacyBridge) internal { + string memory root = vm.projectRoot(); + outputPath = string.concat(root, outputPath); + + if (initializeL2LegacyBridge) { + // This must be run before the chain is deployed + setUpLegacySharedBridgeParams(); + } + + deployGovernance(); + deployChainAdmin(); + deployChainProxyAddress(); + checkTokenAddress(); + 
registerAssetIdOnBridgehub(); + registerTokenOnNTV(); + registerZKChain(); + addValidators(); + configureZkSyncStateTransition(); + setPendingAdmin(); + + if (initializeL2LegacyBridge) { + deployLegacySharedBridge(); + } + + saveOutput(outputPath); + } + + function initializeConfig() internal { + // Grab config from output of l1 deployment + string memory root = vm.projectRoot(); + string memory path = string.concat(root, "/script-config/register-zk-chain.toml"); + string memory toml = vm.readFile(path); + + config.deployerAddress = msg.sender; + + // Config file must be parsed key by key, otherwise values returned + // are parsed alfabetically and not by key. + // https://book.getfoundry.sh/cheatcodes/parse-toml + + config.bridgehub = toml.readAddress("$.deployed_addresses.bridgehub.bridgehub_proxy_addr"); + config.chainTypeManagerProxy = toml.readAddress( + "$.deployed_addresses.state_transition.chain_type_manager_proxy_addr" + ); + config.validatorTimelock = toml.readAddress("$.deployed_addresses.validator_timelock_addr"); + // config.bridgehubGovernance = toml.readAddress("$.deployed_addresses.governance_addr"); + config.nativeTokenVault = toml.readAddress("$.deployed_addresses.native_token_vault_addr"); + config.sharedBridgeProxy = toml.readAddress("$.deployed_addresses.bridges.shared_bridge_proxy_addr"); + config.l1Nullifier = toml.readAddress("$.deployed_addresses.bridges.l1_nullifier_proxy_addr"); + + config.diamondCutData = toml.readBytes("$.contracts_config.diamond_cut_data"); + config.forceDeployments = toml.readBytes("$.contracts_config.force_deployments_data"); + + config.ownerAddress = toml.readAddress("$.owner_address"); + + config.chainChainId = toml.readUint("$.chain.chain_chain_id"); + config.baseTokenGasPriceMultiplierNominator = uint128( + toml.readUint("$.chain.base_token_gas_price_multiplier_nominator") + ); + config.baseTokenGasPriceMultiplierDenominator = uint128( + toml.readUint("$.chain.base_token_gas_price_multiplier_denominator") + ); + 
config.baseToken = toml.readAddress("$.chain.base_token_addr"); + config.governanceSecurityCouncilAddress = toml.readAddress("$.chain.governance_security_council_address"); + config.governanceMinDelay = uint256(toml.readUint("$.chain.governance_min_delay")); + config.bridgehubCreateNewChainSalt = toml.readUint("$.chain.bridgehub_create_new_chain_salt"); + config.validiumMode = toml.readBool("$.chain.validium_mode"); + config.validatorSenderOperatorCommitEth = toml.readAddress("$.chain.validator_sender_operator_commit_eth"); + config.validatorSenderOperatorBlobsEth = toml.readAddress("$.chain.validator_sender_operator_blobs_eth"); + } + + function getConfig() public view returns (Config memory) { + return config; + } + + function initializeConfigTest() internal { + // Grab config from output of l1 deployment + string memory root = vm.projectRoot(); + string memory path = string.concat(root, vm.envString("L1_OUTPUT")); //"/script-config/register-zkChain.toml"); + string memory toml = vm.readFile(path); + + config.deployerAddress = msg.sender; + + // Config file must be parsed key by key, otherwise values returned + // are parsed alfabetically and not by key. 
+ // https://book.getfoundry.sh/cheatcodes/parse-toml + + config.bridgehub = toml.readAddress("$.deployed_addresses.bridgehub.bridgehub_proxy_addr"); + // TODO(EVM-744): name of the key is a bit inconsistent + config.chainTypeManagerProxy = toml.readAddress( + "$.deployed_addresses.state_transition.state_transition_proxy_addr" + ); + config.validatorTimelock = toml.readAddress("$.deployed_addresses.validator_timelock_addr"); + // config.bridgehubGovernance = toml.readAddress("$.deployed_addresses.governance_addr"); + config.nativeTokenVault = toml.readAddress("$.deployed_addresses.native_token_vault_addr"); + config.sharedBridgeProxy = toml.readAddress("$.deployed_addresses.bridges.shared_bridge_proxy_addr"); + config.l1Nullifier = toml.readAddress("$.deployed_addresses.bridges.l1_nullifier_proxy_addr"); + + config.diamondCutData = toml.readBytes("$.contracts_config.diamond_cut_data"); + config.forceDeployments = toml.readBytes("$.contracts_config.force_deployments_data"); + + path = string.concat(root, vm.envString("ZK_CHAIN_CONFIG")); + toml = vm.readFile(path); + + config.ownerAddress = toml.readAddress("$.owner_address"); + + config.chainChainId = toml.readUint("$.chain.chain_chain_id"); + config.bridgehubCreateNewChainSalt = toml.readUint("$.chain.bridgehub_create_new_chain_salt"); + config.baseToken = toml.readAddress("$.chain.base_token_addr"); + config.validiumMode = toml.readBool("$.chain.validium_mode"); + config.validatorSenderOperatorCommitEth = toml.readAddress("$.chain.validator_sender_operator_commit_eth"); + config.validatorSenderOperatorBlobsEth = toml.readAddress("$.chain.validator_sender_operator_blobs_eth"); + config.baseTokenGasPriceMultiplierNominator = uint128( + toml.readUint("$.chain.base_token_gas_price_multiplier_nominator") + ); + config.baseTokenGasPriceMultiplierDenominator = uint128( + toml.readUint("$.chain.base_token_gas_price_multiplier_denominator") + ); + config.governanceMinDelay = 
uint256(toml.readUint("$.chain.governance_min_delay")); + config.governanceSecurityCouncilAddress = toml.readAddress("$.chain.governance_security_council_address"); + } + + function getOwnerAddress() public view returns (address) { + return config.ownerAddress; + } + + function checkTokenAddress() internal view { + if (config.baseToken == address(0)) { + revert("Token address is not set"); + } + + // Check if it's ethereum address + if (config.baseToken == ADDRESS_ONE) { + return; + } + + if (config.baseToken.code.length == 0) { + revert("Token address is not a contract address"); + } + + console.log("Using base token address:", config.baseToken); + } + + function setUpLegacySharedBridgeParams() internal { + bytes memory implementationConstructorParams = hex""; + + address legacyBridgeImplementationAddress = L2ContractHelper.computeCreate2Address( + msg.sender, + "", + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readL2LegacySharedBridgeBytecode()), + keccak256(implementationConstructorParams) + ); + + bytes memory proxyInitializationParams = abi.encodeCall( + L2SharedBridgeLegacy.initialize, + ( + config.sharedBridgeProxy, + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readBeaconProxyBytecode()), + // This is not exactly correct, this should be ecosystem governance and not chain governance + msg.sender + ) + ); + + bytes memory proxyConstructorParams = abi.encode( + legacyBridgeImplementationAddress, + // In real production, this would be aliased ecosystem governance. 
+ // But in real production we also do not initialize legacy shared bridge + msg.sender, + proxyInitializationParams + ); + + address proxyAddress = L2ContractHelper.computeCreate2Address( + msg.sender, + "", + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readTransparentUpgradeableProxyBytecode()), + keccak256(proxyConstructorParams) + ); + + vm.broadcast(); + L1NullifierDev(config.l1Nullifier).setL2LegacySharedBridge(config.chainChainId, proxyAddress); + + legacySharedBridgeParams = LegacySharedBridgeParams({ + implementationConstructorParams: implementationConstructorParams, + implementationAddress: legacyBridgeImplementationAddress, + proxyConstructorParams: proxyConstructorParams, + proxyAddress: proxyAddress + }); + } + + function registerAssetIdOnBridgehub() internal { + IBridgehub bridgehub = IBridgehub(config.bridgehub); + Ownable ownable = Ownable(config.bridgehub); + bytes32 baseTokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, config.baseToken); + + if (bridgehub.assetIdIsRegistered(baseTokenAssetId)) { + console.log("Base token asset id already registered on Bridgehub"); + } else { + bytes memory data = abi.encodeCall(bridgehub.addTokenAssetId, (baseTokenAssetId)); + Utils.executeUpgrade({ + _governor: ownable.owner(), + _salt: bytes32(config.bridgehubCreateNewChainSalt), + _target: config.bridgehub, + _data: data, + _value: 0, + _delay: 0 + }); + console.log("Base token asset id registered on Bridgehub"); + } + } + + function registerTokenOnNTV() internal { + INativeTokenVault ntv = INativeTokenVault(config.nativeTokenVault); + // Ownable ownable = Ownable(config.nativeTokenVault); + bytes32 baseTokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, config.baseToken); + config.baseTokenAssetId = baseTokenAssetId; + if (ntv.tokenAddress(baseTokenAssetId) != address(0) || config.baseToken == ETH_TOKEN_ADDRESS) { + console.log("Token already registered on NTV"); + } else { + // bytes memory data = 
abi.encodeCall(ntv.registerToken, (config.baseToken)); + vm.broadcast(); + ntv.registerToken(config.baseToken); + console.log("Token registered on NTV"); + } + } + + function deployGovernance() internal { + vm.broadcast(); + Governance governance = new Governance( + config.ownerAddress, + config.governanceSecurityCouncilAddress, + config.governanceMinDelay + ); + console.log("Governance deployed at:", address(governance)); + output.governance = address(governance); + } + + function deployChainAdmin() internal { + vm.broadcast(); + AccessControlRestriction restriction = new AccessControlRestriction(0, config.ownerAddress); + output.accessControlRestrictionAddress = address(restriction); + + address[] memory restrictions = new address[](1); + restrictions[0] = address(restriction); + + vm.broadcast(); + ChainAdmin chainAdmin = new ChainAdmin(restrictions); + output.chainAdmin = address(chainAdmin); + } + + function registerZKChain() internal { + IBridgehub bridgehub = IBridgehub(config.bridgehub); + Ownable ownable = Ownable(config.bridgehub); + + vm.recordLogs(); + bytes memory data = abi.encodeCall( + bridgehub.createNewChain, + ( + config.chainChainId, + config.chainTypeManagerProxy, + config.baseTokenAssetId, + config.bridgehubCreateNewChainSalt, + msg.sender, + abi.encode(config.diamondCutData, config.forceDeployments), + getFactoryDeps() + ) + ); + Utils.executeUpgrade({ + _governor: ownable.owner(), + _salt: bytes32(config.bridgehubCreateNewChainSalt), + _target: config.bridgehub, + _data: data, + _value: 0, + _delay: 0 + }); + console.log("ZK chain registered"); + + // Get new diamond proxy address from emitted events + Vm.Log[] memory logs = vm.getRecordedLogs(); + address diamondProxyAddress; + uint256 logsLength = logs.length; + for (uint256 i = 0; i < logsLength; ++i) { + if (logs[i].topics[0] == STATE_TRANSITION_NEW_CHAIN_HASH) { + diamondProxyAddress = address(uint160(uint256(logs[i].topics[2]))); + break; + } + } + if (diamondProxyAddress == 
address(0)) { + revert("Diamond proxy address not found"); + } + output.diamondProxy = diamondProxyAddress; + console.log("ZKChain diamond proxy deployed at:", diamondProxyAddress); + } + + function addValidators() internal { + ValidatorTimelock validatorTimelock = ValidatorTimelock(config.validatorTimelock); + + vm.startBroadcast(msg.sender); + validatorTimelock.addValidator(config.chainChainId, config.validatorSenderOperatorCommitEth); + validatorTimelock.addValidator(config.chainChainId, config.validatorSenderOperatorBlobsEth); + vm.stopBroadcast(); + + console.log("Validators added"); + } + + function configureZkSyncStateTransition() internal { + IZKChain zkChain = IZKChain(output.diamondProxy); + + vm.startBroadcast(msg.sender); + zkChain.setTokenMultiplier( + config.baseTokenGasPriceMultiplierNominator, + config.baseTokenGasPriceMultiplierDenominator + ); + + if (config.validiumMode) { + zkChain.setPubdataPricingMode(PubdataPricingMode.Validium); + } + + vm.stopBroadcast(); + console.log("ZkSync State Transition configured"); + } + + function setPendingAdmin() internal { + IZKChain zkChain = IZKChain(output.diamondProxy); + + vm.startBroadcast(msg.sender); + zkChain.setPendingAdmin(output.chainAdmin); + vm.stopBroadcast(); + console.log("Owner for ", output.diamondProxy, "set to", output.chainAdmin); + } + + function deployChainProxyAddress() internal { + vm.startBroadcast(); + ProxyAdmin proxyAdmin = new ProxyAdmin(); + proxyAdmin.transferOwnership(output.chainAdmin); + vm.stopBroadcast(); + console.log("Transparent Proxy Admin deployed at:", address(proxyAdmin)); + output.chainProxyAdmin = address(proxyAdmin); + } + + function deployLegacySharedBridge() internal { + bytes[] memory emptyDeps = new bytes[](0); + address correctLegacyBridgeImplAddr = Utils.deployThroughL1({ + bytecode: L2ContractsBytecodesLib.readL2LegacySharedBridgeBytecode(), + constructorargs: legacySharedBridgeParams.implementationConstructorParams, + create2salt: "", + l2GasLimit: 
Utils.MAX_PRIORITY_TX_GAS, + factoryDeps: emptyDeps, + chainId: config.chainChainId, + bridgehubAddress: config.bridgehub, + l1SharedBridgeProxy: config.sharedBridgeProxy + }); + + address correctProxyAddress = Utils.deployThroughL1({ + bytecode: L2ContractsBytecodesLib.readTransparentUpgradeableProxyBytecode(), + constructorargs: legacySharedBridgeParams.proxyConstructorParams, + create2salt: "", + l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, + factoryDeps: emptyDeps, + chainId: config.chainChainId, + bridgehubAddress: config.bridgehub, + l1SharedBridgeProxy: config.sharedBridgeProxy + }); + + require( + correctLegacyBridgeImplAddr == legacySharedBridgeParams.implementationAddress, + "Legacy bridge implementation address mismatch" + ); + require(correctProxyAddress == legacySharedBridgeParams.proxyAddress, "Legacy bridge proxy address mismatch"); + + output.l2LegacySharedBridge = correctProxyAddress; + } + + function getFactoryDeps() internal view returns (bytes[] memory) { + bytes[] memory factoryDeps = new bytes[](3); + factoryDeps[0] = L2ContractsBytecodesLib.readBeaconProxyBytecode(); + factoryDeps[1] = L2ContractsBytecodesLib.readStandardERC20Bytecode(); + factoryDeps[2] = L2ContractsBytecodesLib.readUpgradeableBeaconBytecode(); + return factoryDeps; + } + + function saveOutput(string memory outputPath) internal { + vm.serializeAddress("root", "diamond_proxy_addr", output.diamondProxy); + vm.serializeAddress("root", "chain_admin_addr", output.chainAdmin); + vm.serializeAddress("root", "l2_legacy_shared_bridge_addr", output.l2LegacySharedBridge); + vm.serializeAddress("root", "access_control_restriction_addr", output.accessControlRestrictionAddress); + vm.serializeAddress("root", "chain_proxy_admin_addr", output.chainProxyAdmin); + + string memory toml = vm.serializeAddress("root", "governance_addr", output.governance); + string memory root = vm.projectRoot(); + vm.writeToml(toml, outputPath); + console.log("Output saved at:", outputPath); + } +} diff --git 
a/l1-contracts/deploy-scripts/Utils.sol b/l1-contracts/deploy-scripts/Utils.sol index 4f5892ef2..4b887ef29 100644 --- a/l1-contracts/deploy-scripts/Utils.sol +++ b/l1-contracts/deploy-scripts/Utils.sol @@ -1,12 +1,17 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; +// solhint-disable gas-custom-errors, reason-string + import {Vm} from "forge-std/Vm.sol"; +import {console2 as console} from "forge-std/Script.sol"; import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; -import {L2TransactionRequestDirect} from "contracts/bridgehub/IBridgehub.sol"; +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; import {IGovernance} from "contracts/governance/IGovernance.sol"; -import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; +import {Ownable} from "@openzeppelin/contracts-v4/access/Ownable.sol"; +import {Call} from "contracts/governance/Common.sol"; import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "contracts/common/Config.sol"; import {L2_DEPLOYER_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; @@ -16,6 +21,7 @@ import {IProtocolUpgradeHandler} from "./interfaces/IProtocolUpgradeHandler.sol" import {IEmergencyUpgrageBoard} from "./interfaces/IEmergencyUpgrageBoard.sol"; import {IMultisig} from "./interfaces/IMultisig.sol"; import {ISafe} from "./interfaces/ISafe.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; /// @dev EIP-712 TypeHash for the emergency protocol upgrade execution approved by the guardians. 
bytes32 constant EXECUTE_EMERGENCY_UPGRADE_GUARDIANS_TYPEHASH = keccak256( @@ -32,6 +38,48 @@ bytes32 constant EXECUTE_EMERGENCY_UPGRADE_ZK_FOUNDATION_TYPEHASH = keccak256( "ExecuteEmergencyUpgradeZKFoundation(bytes32 id)" ); +/// @dev The offset from which the built-in, but user space contracts are located. +uint160 constant USER_CONTRACTS_OFFSET = 0x10000; // 2^16 + +// address constant +address constant L2_BRIDGEHUB_ADDRESS = address(USER_CONTRACTS_OFFSET + 0x02); +address constant L2_ASSET_ROUTER_ADDRESS = address(USER_CONTRACTS_OFFSET + 0x03); +address constant L2_NATIVE_TOKEN_VAULT_ADDRESS = address(USER_CONTRACTS_OFFSET + 0x04); +address constant L2_MESSAGE_ROOT_ADDRESS = address(USER_CONTRACTS_OFFSET + 0x05); + +address constant L2_CREATE2_FACTORY_ADDRESS = address(USER_CONTRACTS_OFFSET); + +// solhint-disable-next-line gas-struct-packing +struct StateTransitionDeployedAddresses { + address chainTypeManagerProxy; + address chainTypeManagerImplementation; + address dualVerifier; + address plonkVerifier; + address fflonkVerifier; + address adminFacet; + address mailboxFacet; + address executorFacet; + address gettersFacet; + address diamondInit; + address genesisUpgrade; + address defaultUpgrade; + address validatorTimelock; + address diamondProxy; +} + +/// @dev We need to use a struct instead of list of params to prevent stack too deep error +struct PrepareL1L2TransactionParams { + uint256 l1GasPrice; + bytes l2Calldata; + uint256 l2GasLimit; + uint256 l2Value; + bytes[] factoryDeps; + address dstAddress; + uint256 chainId; + address bridgehubAddress; + address l1SharedBridgeProxy; +} + library Utils { // Cheatcodes address, 0x7109709ECfa91a80626fF3989D68f67F5b1DD12D. 
address internal constant VM_ADDRESS = address(uint160(uint256(keccak256("hevm cheat code")))); @@ -41,8 +89,8 @@ library Utils { bytes internal constant CREATE2_FACTORY_BYTECODE = hex"604580600e600039806000f350fe7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe03601600081602082378035828234f58015156039578182fd5b8082525050506014600cf3"; - address constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; - uint256 constant MAX_PRIORITY_TX_GAS = 72000000; + address internal constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; + uint256 internal constant MAX_PRIORITY_TX_GAS = 72000000; /** * @dev Get all selectors from the bytecode. @@ -61,12 +109,13 @@ library Utils { // Extract selectors from the result string[] memory parts = vm.split(stringResult, "\n"); - bytes4[] memory selectors = new bytes4[](parts.length); - for (uint256 i = 0; i < parts.length; i++) { + uint256 partsLength = parts.length; + bytes4[] memory selectors = new bytes4[](partsLength); + for (uint256 i = 0; i < partsLength; ++i) { bytes memory part = bytes(parts[i]); bytes memory extractedSelector = new bytes(10); // Selector length 10 is 0x + 4 bytes - for (uint256 j = 0; j < 10; j++) { + for (uint256 j = 0; j < 10; ++j) { extractedSelector[j] = part[j]; } bytes4 selector = bytes4(vm.parseBytes(string(extractedSelector))); @@ -75,7 +124,8 @@ library Utils { // Remove `getName()` selector if existing bool hasGetName = false; - for (uint256 i = 0; i < selectors.length; i++) { + uint256 selectorsLength = selectors.length; + for (uint256 i = 0; i < selectorsLength; ++i) { if (selectors[i] == bytes4(keccak256("getName()"))) { selectors[i] = selectors[selectors.length - 1]; hasGetName = true; @@ -83,8 +133,8 @@ library Utils { } } if (hasGetName) { - bytes4[] memory newSelectors = new bytes4[](selectors.length - 1); - for (uint256 i = 0; i < selectors.length - 1; i++) { + bytes4[] memory newSelectors = new bytes4[](selectorsLength - 1); + for (uint256 i = 0; i < 
selectorsLength - 1; ++i) { newSelectors[i] = selectors[i]; } return newSelectors; @@ -93,6 +143,13 @@ library Utils { return selectors; } + function getAllSelectorsForFacet(string memory facetName) internal returns (bytes4[] memory) { + // TODO(EVM-746): use forge to read the bytecode + string memory path = string.concat("/../l1-contracts/out/", facetName, ".sol/", facetName, "Facet.json"); + bytes memory bytecode = readFoundryDeployedBytecode(path); + return getAllSelectors(bytecode); + } + /** * @dev Extract an address from bytes. */ @@ -107,10 +164,11 @@ library Utils { */ function bytesToUint256(bytes memory bys) internal pure returns (uint256 value) { // Add left padding to 32 bytes if needed - if (bys.length < 32) { + uint256 bysLength = bys.length; + if (bysLength < 32) { bytes memory padded = new bytes(32); - for (uint256 i = 0; i < bys.length; i++) { - padded[i + 32 - bys.length] = bys[i]; + for (uint256 i = 0; i < bysLength; ++i) { + padded[i + 32 - bysLength] = bys[i]; } bys = padded; } @@ -127,6 +185,18 @@ library Utils { return vm.readFileBinary("../system-contracts/bootloader/build/artifacts/proved_batch.yul.zbin"); } + /** + * @dev Read hardhat bytecodes + */ + function readHardhatBytecode(string memory artifactPath) internal view returns (bytes memory) { + string memory root = vm.projectRoot(); + string memory path = string.concat(root, artifactPath); + console.log(path); + string memory json = vm.readFile(path); + bytes memory bytecode = vm.parseJsonBytes(json, ".bytecode"); + return bytecode; + } + /** * @dev Returns the bytecode of a given system contract. 
*/ @@ -197,14 +267,7 @@ library Utils { address bridgehubAddress, address l1SharedBridgeProxy ) internal returns (address) { - bytes32 bytecodeHash = L2ContractHelper.hashL2Bytecode(bytecode); - - bytes memory deployData = abi.encodeWithSignature( - "create2(bytes32,bytes32,bytes)", - create2salt, - bytecodeHash, - constructorargs - ); + (bytes32 bytecodeHash, bytes memory deployData) = getDeploymentCalldata(create2salt, bytecode, constructorargs); address contractAddress = L2ContractHelper.computeCreate2Address( msg.sender, @@ -213,16 +276,12 @@ library Utils { keccak256(constructorargs) ); - bytes[] memory _factoryDeps = new bytes[](factoryDeps.length + 1); - - for (uint256 i = 0; i < factoryDeps.length; i++) { - _factoryDeps[i] = factoryDeps[i]; - } - _factoryDeps[factoryDeps.length] = bytecode; + bytes[] memory _factoryDeps = appendArray(factoryDeps, bytecode); runL1L2Transaction({ l2Calldata: deployData, l2GasLimit: l2GasLimit, + l2Value: 0, factoryDeps: _factoryDeps, dstAddress: L2_DEPLOYER_SYSTEM_CONTRACT_ADDR, chainId: chainId, @@ -232,39 +291,161 @@ library Utils { return contractAddress; } + function getL2AddressViaCreate2Factory( + bytes32 create2Salt, + bytes32 bytecodeHash, + bytes memory constructorArgs + ) internal view returns (address) { + return + L2ContractHelper.computeCreate2Address( + L2_CREATE2_FACTORY_ADDRESS, + create2Salt, + bytecodeHash, + keccak256(constructorArgs) + ); + } + + function getDeploymentCalldata( + bytes32 create2Salt, + bytes memory bytecode, + bytes memory constructorArgs + ) internal view returns (bytes32 bytecodeHash, bytes memory data) { + bytecodeHash = L2ContractHelper.hashL2Bytecode(bytecode); + + data = abi.encodeWithSignature("create2(bytes32,bytes32,bytes)", create2Salt, bytecodeHash, constructorArgs); + } + + function appendArray(bytes[] memory array, bytes memory element) internal pure returns (bytes[] memory) { + uint256 arrayLength = array.length; + bytes[] memory newArray = new bytes[](arrayLength + 1); + for 
(uint256 i = 0; i < arrayLength; ++i) { + newArray[i] = array[i]; + } + newArray[arrayLength] = element; + return newArray; + } + /** - * @dev Run the l2 l1 transaction + * @dev Deploy l2 contracts through l1, while using built-in L2 Create2Factory contract. */ - function runL1L2Transaction( - bytes memory l2Calldata, + function deployThroughL1Deterministic( + bytes memory bytecode, + bytes memory constructorargs, + bytes32 create2salt, uint256 l2GasLimit, bytes[] memory factoryDeps, - address dstAddress, uint256 chainId, address bridgehubAddress, address l1SharedBridgeProxy - ) internal { + ) internal returns (address) { + (bytes32 bytecodeHash, bytes memory deployData) = getDeploymentCalldata(create2salt, bytecode, constructorargs); + + address contractAddress = getL2AddressViaCreate2Factory(create2salt, bytecodeHash, constructorargs); + + bytes[] memory _factoryDeps = appendArray(factoryDeps, bytecode); + + runL1L2Transaction({ + l2Calldata: deployData, + l2GasLimit: l2GasLimit, + l2Value: 0, + factoryDeps: _factoryDeps, + dstAddress: L2_CREATE2_FACTORY_ADDRESS, + chainId: chainId, + bridgehubAddress: bridgehubAddress, + l1SharedBridgeProxy: l1SharedBridgeProxy + }); + return contractAddress; + } + + function prepareL1L2Transaction( + PrepareL1L2TransactionParams memory params + ) internal returns (L2TransactionRequestDirect memory l2TransactionRequestDirect, uint256 requiredValueToDeploy) { + Bridgehub bridgehub = Bridgehub(params.bridgehubAddress); + + requiredValueToDeploy = + bridgehub.l2TransactionBaseCost( + params.chainId, + params.l1GasPrice, + params.l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ) * + 2 + + params.l2Value; + + l2TransactionRequestDirect = L2TransactionRequestDirect({ + chainId: params.chainId, + mintValue: requiredValueToDeploy, + l2Contract: params.dstAddress, + l2Value: params.l2Value, + l2Calldata: params.l2Calldata, + l2GasLimit: params.l2GasLimit, + l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + factoryDeps: 
params.factoryDeps, + refundRecipient: msg.sender + }); + } + + function prepareL1L2TransactionTwoBridges( + uint256 l1GasPrice, + uint256 l2GasLimit, + uint256 chainId, + address bridgehubAddress, + address secondBridgeAddress, + uint256 secondBridgeValue, + bytes memory secondBridgeCalldata + ) + internal + returns (L2TransactionRequestTwoBridgesOuter memory l2TransactionRequest, uint256 requiredValueToDeploy) + { Bridgehub bridgehub = Bridgehub(bridgehubAddress); - uint256 gasPrice = bytesToUint256(vm.rpc("eth_gasPrice", "[]")); - uint256 requiredValueToDeploy = bridgehub.l2TransactionBaseCost( - chainId, - gasPrice, - l2GasLimit, - REQUIRED_L2_GAS_PRICE_PER_PUBDATA - ) * 2; + requiredValueToDeploy = + bridgehub.l2TransactionBaseCost(chainId, l1GasPrice, l2GasLimit, REQUIRED_L2_GAS_PRICE_PER_PUBDATA) * + 2; - L2TransactionRequestDirect memory l2TransactionRequestDirect = L2TransactionRequestDirect({ + l2TransactionRequest = L2TransactionRequestTwoBridgesOuter({ chainId: chainId, mintValue: requiredValueToDeploy, - l2Contract: dstAddress, l2Value: 0, - l2Calldata: l2Calldata, l2GasLimit: l2GasLimit, l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, - factoryDeps: factoryDeps, - refundRecipient: msg.sender + refundRecipient: msg.sender, + secondBridgeAddress: secondBridgeAddress, + secondBridgeValue: secondBridgeValue, + secondBridgeCalldata: secondBridgeCalldata }); + } + + /** + * @dev Run the l2 l1 transaction + */ + function runL1L2Transaction( + bytes memory l2Calldata, + uint256 l2GasLimit, + uint256 l2Value, + bytes[] memory factoryDeps, + address dstAddress, + uint256 chainId, + address bridgehubAddress, + address l1SharedBridgeProxy + ) internal { + Bridgehub bridgehub = Bridgehub(bridgehubAddress); + ( + L2TransactionRequestDirect memory l2TransactionRequestDirect, + uint256 requiredValueToDeploy + ) = prepareL1L2Transaction( + PrepareL1L2TransactionParams({ + l1GasPrice: bytesToUint256(vm.rpc("eth_gasPrice", "[]")), + l2Calldata: l2Calldata, 
+ l2GasLimit: l2GasLimit, + l2Value: l2Value, + factoryDeps: factoryDeps, + dstAddress: dstAddress, + chainId: chainId, + bridgehubAddress: bridgehubAddress, + l1SharedBridgeProxy: l1SharedBridgeProxy + }) + ); address baseTokenAddress = bridgehub.baseToken(chainId); if (ADDRESS_ONE != baseTokenAddress) { @@ -278,6 +459,312 @@ library Utils { bridgehub.requestL2TransactionDirect{value: requiredValueToDeploy}(l2TransactionRequestDirect); } + function runGovernanceL1L2DirectTransaction( + uint256 l1GasPrice, + address governor, + bytes32 salt, + bytes memory l2Calldata, + uint256 l2GasLimit, + bytes[] memory factoryDeps, + address dstAddress, + uint256 chainId, + address bridgehubAddress, + address l1SharedBridgeProxy + ) internal returns (bytes32 txHash) { + ( + L2TransactionRequestDirect memory l2TransactionRequestDirect, + uint256 requiredValueToDeploy + ) = prepareL1L2Transaction( + PrepareL1L2TransactionParams({ + l1GasPrice: l1GasPrice, + l2Calldata: l2Calldata, + l2GasLimit: l2GasLimit, + l2Value: 0, + factoryDeps: factoryDeps, + dstAddress: dstAddress, + chainId: chainId, + bridgehubAddress: bridgehubAddress, + l1SharedBridgeProxy: l1SharedBridgeProxy + }) + ); + + requiredValueToDeploy = approveBaseTokenGovernance( + Bridgehub(bridgehubAddress), + l1SharedBridgeProxy, + governor, + salt, + chainId, + requiredValueToDeploy + ); + + bytes memory l2TransactionRequestDirectCalldata = abi.encodeCall( + Bridgehub.requestL2TransactionDirect, + (l2TransactionRequestDirect) + ); + + console.log("Executing transaction"); + vm.recordLogs(); + executeUpgrade(governor, salt, bridgehubAddress, l2TransactionRequestDirectCalldata, requiredValueToDeploy, 0); + Vm.Log[] memory logs = vm.getRecordedLogs(); + console.log("Transaction executed successfully! 
Extracting logs..."); + + address expectedDiamondProxyAddress = Bridgehub(bridgehubAddress).getHyperchain(chainId); + + txHash = extractPriorityOpFromLogs(expectedDiamondProxyAddress, logs); + + console.log("L2 Transaction hash is "); + console.logBytes32(txHash); + } + + function runGovernanceL1L2TwoBridgesTransaction( + uint256 l1GasPrice, + address governor, + bytes32 salt, + uint256 l2GasLimit, + uint256 chainId, + address bridgehubAddress, + address l1SharedBridgeProxy, + address secondBridgeAddress, + uint256 secondBridgeValue, + bytes memory secondBridgeCalldata + ) internal returns (bytes32 txHash) { + ( + L2TransactionRequestTwoBridgesOuter memory l2TransactionRequest, + uint256 requiredValueToDeploy + ) = prepareL1L2TransactionTwoBridges( + l1GasPrice, + l2GasLimit, + chainId, + bridgehubAddress, + secondBridgeAddress, + secondBridgeValue, + secondBridgeCalldata + ); + + requiredValueToDeploy = approveBaseTokenGovernance( + Bridgehub(bridgehubAddress), + l1SharedBridgeProxy, + governor, + salt, + chainId, + requiredValueToDeploy + ); + + bytes memory l2TransactionRequestCalldata = abi.encodeCall( + Bridgehub.requestL2TransactionTwoBridges, + (l2TransactionRequest) + ); + + console.log("Executing transaction"); + vm.recordLogs(); + executeUpgrade(governor, salt, bridgehubAddress, l2TransactionRequestCalldata, requiredValueToDeploy, 0); + Vm.Log[] memory logs = vm.getRecordedLogs(); + console.log("Transaction executed successfully! 
Extracting logs..."); + + address expectedDiamondProxyAddress = Bridgehub(bridgehubAddress).getHyperchain(chainId); + + txHash = extractPriorityOpFromLogs(expectedDiamondProxyAddress, logs); + + console.log("L2 Transaction hash is "); + console.logBytes32(txHash); + } + + function approveBaseTokenGovernance( + Bridgehub bridgehub, + address l1SharedBridgeProxy, + address governor, + bytes32 salt, + uint256 chainId, + uint256 amountToApprove + ) internal returns (uint256 ethAmountToPass) { + address baseTokenAddress = bridgehub.baseToken(chainId); + if (ADDRESS_ONE != baseTokenAddress) { + console.log("Base token not ETH, approving"); + IERC20 baseToken = IERC20(baseTokenAddress); + + bytes memory approvalCalldata = abi.encodeCall(baseToken.approve, (l1SharedBridgeProxy, amountToApprove)); + + executeUpgrade(governor, salt, address(baseToken), approvalCalldata, 0, 0); + + ethAmountToPass = 0; + } else { + console.log("Base token is ETH, no need to approve"); + ethAmountToPass = amountToApprove; + } + } + + function runAdminL1L2DirectTransaction( + uint256 gasPrice, + address admin, + address accessControlRestriction, + bytes memory l2Calldata, + uint256 l2GasLimit, + bytes[] memory factoryDeps, + address dstAddress, + uint256 chainId, + address bridgehubAddress, + address l1SharedBridgeProxy + ) internal returns (bytes32 txHash) { + ( + L2TransactionRequestDirect memory l2TransactionRequestDirect, + uint256 requiredValueToDeploy + ) = prepareL1L2Transaction( + PrepareL1L2TransactionParams({ + l1GasPrice: gasPrice, + l2Calldata: l2Calldata, + l2GasLimit: l2GasLimit, + l2Value: 0, + factoryDeps: factoryDeps, + dstAddress: dstAddress, + chainId: chainId, + bridgehubAddress: bridgehubAddress, + l1SharedBridgeProxy: l1SharedBridgeProxy + }) + ); + + requiredValueToDeploy = approveBaseTokenAdmin( + Bridgehub(bridgehubAddress), + l1SharedBridgeProxy, + admin, + accessControlRestriction, + chainId, + requiredValueToDeploy + ); + + bytes memory 
l2TransactionRequestDirectCalldata = abi.encodeCall( + Bridgehub.requestL2TransactionDirect, + (l2TransactionRequestDirect) + ); + + console.log("Executing transaction"); + vm.recordLogs(); + adminExecute( + admin, + accessControlRestriction, + bridgehubAddress, + l2TransactionRequestDirectCalldata, + requiredValueToDeploy + ); + Vm.Log[] memory logs = vm.getRecordedLogs(); + console.log("Transaction executed successfully! Extracting logs..."); + + address expectedDiamondProxyAddress = Bridgehub(bridgehubAddress).getHyperchain(chainId); + + txHash = extractPriorityOpFromLogs(expectedDiamondProxyAddress, logs); + + console.log("L2 Transaction hash is "); + console.logBytes32(txHash); + } + + function runAdminL1L2TwoBridgesTransaction( + uint256 l1GasPrice, + address admin, + address accessControlRestriction, + uint256 l2GasLimit, + uint256 chainId, + address bridgehubAddress, + address l1SharedBridgeProxy, + address secondBridgeAddress, + uint256 secondBridgeValue, + bytes memory secondBridgeCalldata + ) internal returns (bytes32 txHash) { + ( + L2TransactionRequestTwoBridgesOuter memory l2TransactionRequest, + uint256 requiredValueToDeploy + ) = prepareL1L2TransactionTwoBridges( + l1GasPrice, + l2GasLimit, + chainId, + bridgehubAddress, + secondBridgeAddress, + secondBridgeValue, + secondBridgeCalldata + ); + + requiredValueToDeploy = approveBaseTokenAdmin( + Bridgehub(bridgehubAddress), + l1SharedBridgeProxy, + admin, + accessControlRestriction, + chainId, + requiredValueToDeploy + ); + + bytes memory l2TransactionRequestCalldata = abi.encodeCall( + Bridgehub.requestL2TransactionTwoBridges, + (l2TransactionRequest) + ); + + console.log("Executing transaction"); + vm.recordLogs(); + adminExecute( + admin, + accessControlRestriction, + bridgehubAddress, + l2TransactionRequestCalldata, + requiredValueToDeploy + ); + Vm.Log[] memory logs = vm.getRecordedLogs(); + console.log("Transaction executed successfully! 
Extracting logs..."); + + address expectedDiamondProxyAddress = Bridgehub(bridgehubAddress).getHyperchain(chainId); + + txHash = extractPriorityOpFromLogs(expectedDiamondProxyAddress, logs); + + console.log("L2 Transaction hash is "); + console.logBytes32(txHash); + } + + function approveBaseTokenAdmin( + Bridgehub bridgehub, + address l1SharedBridgeProxy, + address admin, + address accessControlRestriction, + uint256 chainId, + uint256 amountToApprove + ) internal returns (uint256 ethAmountToPass) { + address baseTokenAddress = bridgehub.baseToken(chainId); + if (ADDRESS_ONE != baseTokenAddress) { + console.log("Base token not ETH, approving"); + IERC20 baseToken = IERC20(baseTokenAddress); + + bytes memory approvalCalldata = abi.encodeCall(baseToken.approve, (l1SharedBridgeProxy, amountToApprove)); + + adminExecute(admin, accessControlRestriction, address(baseToken), approvalCalldata, 0); + + ethAmountToPass = 0; + } else { + console.log("Base token is ETH, no need to approve"); + ethAmountToPass = amountToApprove; + } + } + + function extractPriorityOpFromLogs( + address expectedDiamondProxyAddress, + Vm.Log[] memory logs + ) internal pure returns (bytes32 txHash) { + // TODO(EVM-749): cleanup the constant and automate its derivation + bytes32 topic0 = bytes32(uint256(0x4531cd5795773d7101c17bdeb9f5ab7f47d7056017506f937083be5d6e77a382)); + + for (uint256 i = 0; i < logs.length; i++) { + if (logs[i].emitter == expectedDiamondProxyAddress && logs[i].topics[0] == topic0) { + if (txHash != bytes32(0)) { + revert("Multiple priority ops"); + } + + bytes memory data = logs[i].data; + assembly { + // Skip length + tx id + txHash := mload(add(data, 0x40)) + } + } + } + + if (txHash == bytes32(0)) { + revert("No priority op found"); + } + } + /** * @dev Publish bytecodes to l2 through l1 */ @@ -290,6 +777,7 @@ library Utils { runL1L2Transaction({ l2Calldata: "", l2GasLimit: MAX_PRIORITY_TX_GAS, + l2Value: 0, factoryDeps: factoryDeps, dstAddress: 
0x0000000000000000000000000000000000000000, chainId: @@ -299,23 +787,25 @@ library Utils { } /** - * @dev Read hardhat bytecodes + * @dev Read foundry bytecodes */ - function readHardhatBytecode(string memory artifactPath) internal view returns (bytes memory) { + function readFoundryBytecode(string memory artifactPath) internal view returns (bytes memory) { string memory root = vm.projectRoot(); string memory path = string.concat(root, artifactPath); string memory json = vm.readFile(path); - bytes memory bytecode = vm.parseJsonBytes(json, ".bytecode"); + bytes memory bytecode = vm.parseJsonBytes(json, ".bytecode.object"); return bytecode; } - function chainAdminMulticall(address _chainAdmin, address _target, bytes memory _data, uint256 _value) internal { - IChainAdmin chainAdmin = IChainAdmin(_chainAdmin); - - IChainAdmin.Call[] memory calls = new IChainAdmin.Call[](1); - calls[0] = IChainAdmin.Call({target: _target, value: _value, data: _data}); - vm.broadcast(); - chainAdmin.multicall(calls, true); + /** + * @dev Read foundry deployed bytecodes + */ + function readFoundryDeployedBytecode(string memory artifactPath) internal view returns (bytes memory) { + string memory root = vm.projectRoot(); + string memory path = string.concat(root, artifactPath); + string memory json = vm.readFile(path); + bytes memory bytecode = vm.parseJsonBytes(json, ".deployedBytecode.object"); + return bytecode; } function executeUpgrade( @@ -327,9 +817,10 @@ library Utils { uint256 _delay ) internal { IGovernance governance = IGovernance(_governor); + Ownable ownable = Ownable(_governor); - IGovernance.Call[] memory calls = new IGovernance.Call[](1); - calls[0] = IGovernance.Call({target: _target, value: _value, data: _data}); + Call[] memory calls = new Call[](1); + calls[0] = Call({target: _target, value: _value, data: _data}); IGovernance.Operation memory operation = IGovernance.Operation({ calls: calls, @@ -337,7 +828,7 @@ library Utils { salt: _salt }); - vm.startBroadcast(); + 
vm.startBroadcast(ownable.owner()); governance.scheduleTransparent(operation, _delay); if (_delay == 0) { governance.execute{value: _value}(operation); @@ -457,4 +948,28 @@ library Utils { vm.stopBroadcast(); } } + + function adminExecute( + address _admin, + address _accessControlRestriction, + address _target, + bytes memory _data, + uint256 _value + ) internal { + address defaultAdmin = AccessControlRestriction(_accessControlRestriction).defaultAdmin(); + + Call[] memory calls = new Call[](1); + calls[0] = Call({target: _target, value: _value, data: _data}); + + vm.startBroadcast(defaultAdmin); + IChainAdmin(_admin).multicall{value: _value}(calls, true); + vm.stopBroadcast(); + } + + function readRollupDAValidatorBytecode() internal view returns (bytes memory bytecode) { + bytecode = readFoundryBytecode("/../da-contracts/out/RollupL1DAValidator.sol/RollupL1DAValidator.json"); + } + + // add this to be excluded from coverage report + function test() internal {} } diff --git a/l1-contracts/deploy-scripts/ZkSyncScriptErrors.sol b/l1-contracts/deploy-scripts/ZkSyncScriptErrors.sol new file mode 100644 index 000000000..76295d633 --- /dev/null +++ b/l1-contracts/deploy-scripts/ZkSyncScriptErrors.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +error FailedToDeploy(ZksyncContract); +error BytecodeNotSet(); +error FailedToDeployViaCreate2(); +error MissingAddress(ZksyncContract); +error AddressHasNoCode(address); +error MintFailed(); + +enum ZksyncContract { + Create2Factory, + DiamondProxy, + BaseToken +} diff --git a/l1-contracts/deploy-scripts/dev/SetupLegacyBridge.s.sol b/l1-contracts/deploy-scripts/dev/SetupLegacyBridge.s.sol index e178824b1..eaab24942 100644 --- a/l1-contracts/deploy-scripts/dev/SetupLegacyBridge.s.sol +++ b/l1-contracts/deploy-scripts/dev/SetupLegacyBridge.s.sol @@ -4,6 +4,7 @@ pragma solidity ^0.8.0; import {Script} from "forge-std/Script.sol"; import {stdToml} from "forge-std/StdToml.sol"; import {Utils} from 
"./../Utils.sol"; +import {L2ContractsBytecodesLib} from "../L2ContractsBytecodesLib.sol"; import {L1SharedBridge} from "contracts/bridge/L1SharedBridge.sol"; import {DummyL1ERC20Bridge} from "contracts/dev-contracts/DummyL1ERC20Bridge.sol"; import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol"; @@ -112,9 +113,7 @@ contract SetupLegacyBridge is Script { internal returns (address tokenBeaconAddress, bytes32 tokenBeaconBytecodeHash) { - bytes memory l2StandardTokenCode = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/contracts/bridge/L2StandardERC20.sol/L2StandardERC20.json" - ); + bytes memory l2StandardTokenCode = L2ContractsBytecodesLib.readStandardERC20Bytecode(); (address l2StandardToken, ) = calculateL2Create2Address( config.l2SharedBridgeAddress, l2StandardTokenCode, @@ -122,14 +121,10 @@ contract SetupLegacyBridge is Script { "" ); - bytes memory beaconProxy = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/@openzeppelin/contracts/proxy/beacon/BeaconProxy.sol/BeaconProxy.json" - ); + bytes memory beaconProxy = L2ContractsBytecodesLib.readBeaconProxyBytecode(); tokenBeaconBytecodeHash = L2ContractHelper.hashL2Bytecode(beaconProxy); - bytes memory upgradableBeacon = Utils.readHardhatBytecode( - "/../l2-contracts/artifacts-zk/@openzeppelin/contracts/proxy/beacon/UpgradeableBeacon.sol/UpgradeableBeacon.json" - ); + bytes memory upgradableBeacon = L2ContractsBytecodesLib.readUpgradeableBeaconBytecode(); (tokenBeaconAddress, ) = calculateL2Create2Address( config.l2SharedBridgeAddress, diff --git a/l1-contracts/deploy-scripts/upgrade/ChainUpgrade.s.sol b/l1-contracts/deploy-scripts/upgrade/ChainUpgrade.s.sol new file mode 100644 index 000000000..7df6863d3 --- /dev/null +++ b/l1-contracts/deploy-scripts/upgrade/ChainUpgrade.s.sol @@ -0,0 +1,212 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-custom-errors + +import {Script, console2 as console} from 
"forge-std/Script.sol"; +import {stdToml} from "forge-std/StdToml.sol"; +import {Utils, L2_BRIDGEHUB_ADDRESS, L2_ASSET_ROUTER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS, L2_MESSAGE_ROOT_ADDRESS} from "../Utils.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {L2ContractsBytecodesLib} from "../L2ContractsBytecodesLib.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {Call} from "contracts/governance/Common.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; + +interface LegacyChainAdmin { + function owner() external view returns (address); +} + +contract ChainUpgrade is Script { + using stdToml for string; + + struct ChainConfig { + address deployerAddress; + address ownerAddress; + uint256 chainChainId; + address chainDiamondProxyAddress; + bool validiumMode; + bool permanentRollup; + // FIXME: From ecosystem, maybe move to a different struct + address expectedRollupL2DAValidator; + address expectedL2GatewayUpgrade; + address expectedValidiumL2DAValidator; + address permanentRollupRestriction; + address bridgehubProxyAddress; + address oldSharedBridgeProxyAddress; + } + + struct Output { + address l2DAValidator; + address accessControlRestriction; + address chainAdmin; + } + + address currentChainAdmin; + ChainConfig config; + Output output; + + function prepareChain( + string memory ecosystemInputPath, + string memory ecosystemOutputPath, + string memory configPath, + string memory outputPath + ) public { + string memory root = vm.projectRoot(); + ecosystemInputPath = string.concat(root, ecosystemInputPath); + ecosystemOutputPath = string.concat(root, ecosystemOutputPath); + configPath = string.concat(root, 
configPath); + outputPath = string.concat(root, outputPath); + + initializeConfig(configPath, ecosystemInputPath, ecosystemOutputPath); + + checkCorrectOwnerAddress(); + // Preparation of chain consists of two parts: + // - Deploying l2 da validator + // - Deploying new chain admin + + deployNewL2DAValidator(); + deployL2GatewayUpgrade(); + deployNewChainAdmin(); + governanceMoveToNewChainAdmin(); + + saveOutput(outputPath); + } + + function upgradeChain(uint256 oldProtocolVersion, Diamond.DiamondCutData memory upgradeCutData) public { + Utils.adminExecute( + output.chainAdmin, + output.accessControlRestriction, + config.chainDiamondProxyAddress, + abi.encodeCall(IAdmin.upgradeChainFromVersion, (oldProtocolVersion, upgradeCutData)), + 0 + ); + } + + function initializeConfig( + string memory configPath, + string memory ecosystemInputPath, + string memory ecosystemOutputPath + ) internal { + config.deployerAddress = msg.sender; + + // Grab config from output of l1 deployment + string memory toml = vm.readFile(configPath); + + // Config file must be parsed key by key, otherwise values returned + // are parsed alfabetically and not by key. 
+ // https://book.getfoundry.sh/cheatcodes/parse-toml + + config.ownerAddress = toml.readAddress("$.owner_address"); + config.chainChainId = toml.readUint("$.chain.chain_id"); + config.validiumMode = toml.readBool("$.chain.validium_mode"); + config.chainDiamondProxyAddress = toml.readAddress("$.chain.diamond_proxy_address"); + config.permanentRollup = toml.readBool("$.chain.permanent_rollup"); + + toml = vm.readFile(ecosystemOutputPath); + + config.expectedRollupL2DAValidator = toml.readAddress("$.contracts_config.expected_rollup_l2_da_validator"); + config.expectedValidiumL2DAValidator = toml.readAddress("$.contracts_config.expected_validium_l2_da_validator"); + config.expectedL2GatewayUpgrade = toml.readAddress("$.contracts_config.expected_l2_gateway_upgrade"); + config.permanentRollupRestriction = toml.readAddress("$.deployed_addresses.permanent_rollup_restriction"); + + toml = vm.readFile(ecosystemInputPath); + + config.bridgehubProxyAddress = toml.readAddress("$.contracts.bridgehub_proxy_address"); + config.oldSharedBridgeProxyAddress = toml.readAddress("$.contracts.old_shared_bridge_proxy_address"); + } + + function checkCorrectOwnerAddress() internal { + currentChainAdmin = address(IZKChain(config.chainDiamondProxyAddress).getAdmin()); + address currentAdminOwner = LegacyChainAdmin(currentChainAdmin).owner(); + + require(currentAdminOwner == config.ownerAddress, "Only the owner of the chain admin can call this function"); + } + + function deployNewL2DAValidator() internal { + address expectedL2DAValidator = Utils.deployThroughL1Deterministic({ + // FIXME: for now this script only works with rollup chains + bytecode: L2ContractsBytecodesLib.readRollupL2DAValidatorBytecode(), + constructorargs: hex"", + create2salt: bytes32(0), + l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, + factoryDeps: new bytes[](0), + chainId: config.chainChainId, + bridgehubAddress: config.bridgehubProxyAddress, + l1SharedBridgeProxy: config.oldSharedBridgeProxyAddress + }); + // FIXME: for 
now this script only works with rollup chains + require(expectedL2DAValidator == config.expectedRollupL2DAValidator, "Invalid L2DAValidator address"); + + output.l2DAValidator = expectedL2DAValidator; + } + + function deployL2GatewayUpgrade() internal { + address expectedGatewayUpgrade = Utils.deployThroughL1Deterministic({ + bytecode: L2ContractsBytecodesLib.readGatewayUpgradeBytecode(), + constructorargs: hex"", + create2salt: bytes32(0), + l2GasLimit: Utils.MAX_PRIORITY_TX_GAS, + factoryDeps: new bytes[](0), + chainId: config.chainChainId, + bridgehubAddress: config.bridgehubProxyAddress, + l1SharedBridgeProxy: config.oldSharedBridgeProxyAddress + }); + require(expectedGatewayUpgrade == config.expectedL2GatewayUpgrade, "Invalid L2Gateway address"); + } + + function deployNewChainAdmin() internal { + AccessControlRestriction accessControlRestriction = new AccessControlRestriction(0, config.ownerAddress); + + address[] memory restrictions; + if (config.permanentRollup) { + restrictions = new address[](2); + restrictions[0] = address(accessControlRestriction); + restrictions[1] = config.permanentRollupRestriction; + } else { + restrictions = new address[](1); + restrictions[0] = address(accessControlRestriction); + } + + ChainAdmin newChainAdmin = new ChainAdmin(restrictions); + output.chainAdmin = address(newChainAdmin); + output.accessControlRestriction = address(accessControlRestriction); + } + + /// @dev The caller of this function needs to be the owner of the chain admin + /// of the + function governanceMoveToNewChainAdmin() internal { + // Firstly, we need to call the legacy chain admin to transfer the ownership to the new chain admin + Call[] memory calls = new Call[](1); + calls[0] = Call({ + target: config.chainDiamondProxyAddress, + value: 0, + data: abi.encodeCall(IAdmin.setPendingAdmin, (output.chainAdmin)) + }); + + vm.startBroadcast(config.ownerAddress); + ChainAdmin(payable(currentChainAdmin)).multicall(calls, true); + vm.stopBroadcast(); + + // Now 
we need to accept the adminship + Utils.adminExecute({ + _admin: output.chainAdmin, + _accessControlRestriction: output.accessControlRestriction, + _target: config.chainDiamondProxyAddress, + _data: abi.encodeCall(IAdmin.acceptAdmin, ()), + _value: 0 + }); + } + + function saveOutput(string memory outputPath) internal { + vm.serializeAddress("root", "l2_da_validator_addr", output.l2DAValidator); + vm.serializeAddress("root", "chain_admin_addr", output.chainAdmin); + + string memory toml = vm.serializeAddress("root", "access_control_restriction", output.accessControlRestriction); + string memory root = vm.projectRoot(); + vm.writeToml(toml, outputPath); + console.log("Output saved at:", outputPath); + } +} diff --git a/l1-contracts/deploy-scripts/upgrade/EcosystemUpgrade.s.sol b/l1-contracts/deploy-scripts/upgrade/EcosystemUpgrade.s.sol new file mode 100644 index 000000000..1fa365e01 --- /dev/null +++ b/l1-contracts/deploy-scripts/upgrade/EcosystemUpgrade.s.sol @@ -0,0 +1,1352 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-custom-errors + +import {Script, console2 as console} from "forge-std/Script.sol"; +import {stdToml} from "forge-std/StdToml.sol"; +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; +import {TransparentUpgradeableProxy, ITransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {Utils, L2_BRIDGEHUB_ADDRESS, L2_ASSET_ROUTER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS, L2_MESSAGE_ROOT_ADDRESS} from "../Utils.sol"; +import {Multicall3} from "contracts/dev-contracts/Multicall3.sol"; +import {Verifier} from "contracts/state-transition/Verifier.sol"; +import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; +import {VerifierParams, IVerifier} from 
"contracts/state-transition/chain-interfaces/IVerifier.sol"; +import {DefaultUpgrade} from "contracts/upgrades/DefaultUpgrade.sol"; +import {Governance} from "contracts/governance/Governance.sol"; +import {L1GenesisUpgrade} from "contracts/upgrades/L1GenesisUpgrade.sol"; +import {GatewayUpgrade} from "contracts/upgrades/GatewayUpgrade.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {ValidatorTimelock} from "contracts/state-transition/ValidatorTimelock.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {CTMDeploymentTracker} from "contracts/bridgehub/CTMDeploymentTracker.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; +import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; +import {ChainTypeManager} from "contracts/state-transition/ChainTypeManager.sol"; +import {ChainTypeManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IChainTypeManager.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {InitializeDataNewChain as DiamondInitializeDataNewChain} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {L1ERC20Bridge} from "contracts/bridge/L1ERC20Bridge.sol"; +import {L1Nullifier} from 
"contracts/bridge/L1Nullifier.sol"; +import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {AddressHasNoCode} from "../ZkSyncScriptErrors.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {IL2ContractDeployer} from "contracts/common/interfaces/IL2ContractDeployer.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {IL1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {L1NullifierDev} from "contracts/dev-contracts/L1NullifierDev.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {PermanentRestriction} from "contracts/governance/PermanentRestriction.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L2ContractsBytecodesLib} from "../L2ContractsBytecodesLib.sol"; +import {ValidiumL1DAValidator} from "contracts/state-transition/data-availability/ValidiumL1DAValidator.sol"; +import {Call} from "contracts/governance/Common.sol"; +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {ProposedUpgrade} from "contracts/upgrades/BaseZkSyncUpgrade.sol"; + +import 
{L2CanonicalTransaction} from "contracts/common/Messaging.sol"; + +import {L2_FORCE_DEPLOYER_ADDR, L2_COMPLEX_UPGRADER_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IComplexUpgrader} from "contracts/state-transition/l2-deps/IComplexUpgrader.sol"; +import {GatewayUpgradeEncodedInput} from "contracts/upgrades/GatewayUpgrade.sol"; +import {TransitionaryOwner} from "contracts/governance/TransitionaryOwner.sol"; + +struct FixedForceDeploymentsData { + uint256 l1ChainId; + uint256 eraChainId; + address l1AssetRouter; + bytes32 l2TokenProxyBytecodeHash; + address aliasedL1Governance; + uint256 maxNumberOfZKChains; + bytes32 bridgehubBytecodeHash; + bytes32 l2AssetRouterBytecodeHash; + bytes32 l2NtvBytecodeHash; + bytes32 messageRootBytecodeHash; + address l2SharedBridgeLegacyImpl; + address l2BridgedStandardERC20Impl; + address l2BridgeProxyOwnerAddress; + address l2BridgedStandardERC20ProxyOwnerAddress; +} + +// A subset of the ones used for tests +struct StateTransitionDeployedAddresses { + address chainTypeManagerImplementation; + address verifier; + address adminFacet; + address mailboxFacet; + address executorFacet; + address gettersFacet; + address diamondInit; + address genesisUpgrade; + address defaultUpgrade; + address validatorTimelock; +} + +contract EcosystemUpgrade is Script { + using stdToml for string; + + address internal constant ADDRESS_ONE = 0x0000000000000000000000000000000000000001; + address internal constant DETERMINISTIC_CREATE2_ADDRESS = 0x4e59b44847b379578588920cA78FbF26c0B4956C; + + // solhint-disable-next-line gas-struct-packing + struct DeployedAddresses { + BridgehubDeployedAddresses bridgehub; + StateTransitionDeployedAddresses stateTransition; + BridgesDeployedAddresses bridges; + L1NativeTokenVaultAddresses vaults; + DataAvailabilityDeployedAddresses daAddresses; + ExpectedL2Addresses expectedL2Addresses; + address chainAdmin; + address accessControlRestrictionAddress; + address permanentRollupRestriction; + address 
validatorTimelock; + address gatewayUpgrade; + address create2Factory; + address transitionaryOwner; + } + + struct ExpectedL2Addresses { + address expectedRollupL2DAValidator; + address expectedValidiumL2DAValidator; + address expectedL2GatewayUpgrade; + address l2SharedBridgeLegacyImpl; + address l2BridgedStandardERC20Impl; + // In reality, the following addresses need to be + // deployed only on a settlement layer, i.e. the Gateway. + address expectedL2ProxyAdminDeployer; + address expectedL2ProxyAdmin; + address expectedL2AdminFactory; + address expectedL2PermanentRestrictionImpl; + address expectedL2PermanentRestrictionProxy; + } + + // solhint-disable-next-line gas-struct-packing + struct L1NativeTokenVaultAddresses { + address l1NativeTokenVaultImplementation; + address l1NativeTokenVaultProxy; + } + + struct DataAvailabilityDeployedAddresses { + address l1RollupDAValidator; + address l1ValidiumDAValidator; + } + + // solhint-disable-next-line gas-struct-packing + struct BridgehubDeployedAddresses { + address bridgehubImplementation; + address ctmDeploymentTrackerImplementation; + address ctmDeploymentTrackerProxy; + address messageRootImplementation; + address messageRootProxy; + } + + // solhint-disable-next-line gas-struct-packing + struct BridgesDeployedAddresses { + address erc20BridgeImplementation; + address sharedBridgeProxy; + address sharedBridgeImplementation; + address l1NullifierImplementation; + address bridgedStandardERC20Implementation; + address bridgedTokenBeacon; + } + + // solhint-disable-next-line gas-struct-packing + struct Config { + uint256 l1ChainId; + address deployerAddress; + uint256 eraChainId; + address ownerAddress; + bool testnetVerifier; + ContractsConfig contracts; + TokensConfig tokens; + } + + // solhint-disable-next-line gas-struct-packing + struct GeneratedData { + bytes forceDeploymentsData; + bytes diamondCutData; + } + + // solhint-disable-next-line gas-struct-packing + struct ContractsConfig { + bytes32 
create2FactorySalt; + address create2FactoryAddr; + uint256 validatorTimelockExecutionDelay; + bytes32 genesisRoot; + uint256 genesisRollupLeafIndex; + bytes32 genesisBatchCommitment; + uint256 latestProtocolVersion; + bytes32 recursionNodeLevelVkHash; + bytes32 recursionLeafLevelVkHash; + bytes32 recursionCircuitsSetVksHash; + uint256 priorityTxMaxGasLimit; + PubdataPricingMode diamondInitPubdataPricingMode; + uint256 diamondInitBatchOverheadL1Gas; + uint256 diamondInitMaxPubdataPerBatch; + uint256 diamondInitMaxL2GasPerBatch; + uint256 diamondInitPriorityTxMaxPubdata; + uint256 diamondInitMinimalL2GasPrice; + uint256 maxNumberOfChains; + bytes32 bootloaderHash; + bytes32 defaultAAHash; + address oldValidatorTimelock; + address legacyErc20BridgeAddress; + address bridgehubProxyAddress; + address oldSharedBridgeProxyAddress; + address stateTransitionManagerAddress; + address transparentProxyAdmin; + address eraDiamondProxy; + address blobVersionedHashRetriever; + address l2BridgeProxyOwnerAddress; + address l2BridgedStandardERC20ProxyOwnerAddress; + uint256 fflonkProofLength; + } + + struct TokensConfig { + address tokenWethAddress; + } + + Config internal config; + GeneratedData internal generatedData; + DeployedAddresses internal addresses; + + function prepareEcosystemContracts(string memory configPath, string memory outputPath) public { + string memory root = vm.projectRoot(); + configPath = string.concat(root, configPath); + outputPath = string.concat(root, outputPath); + + initializeConfig(configPath); + + instantiateCreate2Factory(); + + deployVerifier(); + deployDefaultUpgrade(); + deployGenesisUpgrade(); + deployGatewayUpgrade(); + + deployDAValidators(); + deployValidatorTimelock(); + + // TODO: restore + // deployChainAdmin(); + deployBridgehubImplementation(); + deployMessageRootContract(); + + deployL1NullifierContracts(); + deploySharedBridgeContracts(); + deployBridgedStandardERC20Implementation(); + deployBridgedTokenBeacon(); + 
deployL1NativeTokenVaultImplementation(); + deployL1NativeTokenVaultProxy(); + deployErc20BridgeImplementation(); + + deployCTMDeploymentTracker(); + + initializeGeneratedData(); + initializeExpectedL2Addresses(); + + deployChainTypeManagerContract(); + setChainTypeManagerInValidatorTimelock(); + + deployPermanentRollupRestriction(); + + deployTransitionaryOwner(); + + updateOwners(); + + saveOutput(outputPath); + } + + function run() public { + console.log("Deploying L1 contracts"); + } + + function provideAcceptOwnershipCalls() public returns (Call[] memory calls) { + console.log("Providing accept ownership calls"); + + calls = new Call[](4); + calls[0] = Call({ + target: addresses.permanentRollupRestriction, + data: abi.encodeCall(Ownable2StepUpgradeable.acceptOwnership, ()), + value: 0 + }); + calls[1] = Call({ + target: addresses.validatorTimelock, + data: abi.encodeCall(Ownable2StepUpgradeable.acceptOwnership, ()), + value: 0 + }); + calls[2] = Call({ + target: addresses.bridges.sharedBridgeProxy, + data: abi.encodeCall(Ownable2StepUpgradeable.acceptOwnership, ()), + value: 0 + }); + calls[3] = Call({ + target: addresses.bridgehub.ctmDeploymentTrackerProxy, + data: abi.encodeCall(Ownable2StepUpgradeable.acceptOwnership, ()), + value: 0 + }); + } + + function getOwnerAddress() public returns (address) { + return config.ownerAddress; + } + + function _getFacetCutsForDeletion() internal returns (Diamond.FacetCut[] memory facetCuts) { + IZKChain.Facet[] memory facets = IZKChain(config.contracts.eraDiamondProxy).facets(); + + // Freezability does not matter when deleting, so we just put false everywhere + facetCuts = new Diamond.FacetCut[](facets.length); + for (uint i = 0; i < facets.length; i++) { + facetCuts[i] = Diamond.FacetCut({ + facet: address(0), + action: Diamond.Action.Remove, + isFreezable: false, + selectors: facets[i].selectors + }); + } + } + + function _composeUpgradeTx() internal returns (L2CanonicalTransaction memory transaction) { + transaction 
= L2CanonicalTransaction({ + // FIXME: dont use hardcoded values + txType: 254, + from: uint256(uint160(L2_FORCE_DEPLOYER_ADDR)), + to: uint256(uint160(address(L2_COMPLEX_UPGRADER_ADDR))), + gasLimit: 72_000_000, + gasPerPubdataByteLimit: 800, + maxFeePerGas: 0, + maxPriorityFeePerGas: 0, + paymaster: uint256(uint160(address(0))), + nonce: 25, + value: 0, + reserved: [uint256(0), uint256(0), uint256(0), uint256(0)], + // Note, that the data is empty, it will be fully composed inside the `GatewayUpgrade` contract + data: new bytes(0), + signature: new bytes(0), + // All factory deps should've been published before + factoryDeps: new uint256[](0), + paymasterInput: new bytes(0), + // Reserved dynamic type for the future use-case. Using it should be avoided, + // But it is still here, just in case we want to enable some additional functionality + reservedDynamic: new bytes(0) + }); + } + + function getNewProtocolVersion() public returns (uint256) { + return 0x1900000000; + } + + function getOldProtocolDeadline() public returns (uint256) { + return 7 days; + } + + function getOldProtocolVersion() public returns (uint256) { + return 0x1800000002; + } + + function provideSetNewVersionUpgradeCall() public returns (Call[] memory calls) { + // Just retrieved it from the contract + uint256 PREVIOUS_PROTOCOL_VERSION = getOldProtocolVersion(); + uint256 DEADLINE = getOldProtocolDeadline(); + uint256 NEW_PROTOCOL_VERSION = getNewProtocolVersion(); + Call memory call = Call({ + target: config.contracts.stateTransitionManagerAddress, + data: abi.encodeCall( + ChainTypeManager.setNewVersionUpgrade, + (getChainUpgradeInfo(), PREVIOUS_PROTOCOL_VERSION, DEADLINE, NEW_PROTOCOL_VERSION) + ), + value: 0 + }); + + calls = new Call[](1); + calls[0] = call; + } + + function getChainUpgradeInfo() public returns (Diamond.DiamondCutData memory upgradeCutData) { + Diamond.FacetCut[] memory deletedFacets = _getFacetCutsForDeletion(); + + Diamond.FacetCut[] memory facetCuts = new 
Diamond.FacetCut[](deletedFacets.length + 4); + for (uint i = 0; i < deletedFacets.length; i++) { + facetCuts[i] = deletedFacets[i]; + } + facetCuts[deletedFacets.length] = Diamond.FacetCut({ + facet: addresses.stateTransition.adminFacet, + action: Diamond.Action.Add, + isFreezable: false, + selectors: Utils.getAllSelectors(addresses.stateTransition.adminFacet.code) + }); + facetCuts[deletedFacets.length + 1] = Diamond.FacetCut({ + facet: addresses.stateTransition.gettersFacet, + action: Diamond.Action.Add, + isFreezable: false, + selectors: Utils.getAllSelectors(addresses.stateTransition.gettersFacet.code) + }); + facetCuts[deletedFacets.length + 2] = Diamond.FacetCut({ + facet: addresses.stateTransition.mailboxFacet, + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getAllSelectors(addresses.stateTransition.mailboxFacet.code) + }); + facetCuts[deletedFacets.length + 3] = Diamond.FacetCut({ + facet: addresses.stateTransition.executorFacet, + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getAllSelectors(addresses.stateTransition.executorFacet.code) + }); + + VerifierParams memory verifierParams = VerifierParams({ + recursionNodeLevelVkHash: config.contracts.recursionNodeLevelVkHash, + recursionLeafLevelVkHash: config.contracts.recursionLeafLevelVkHash, + recursionCircuitsSetVksHash: config.contracts.recursionCircuitsSetVksHash + }); + + // TODO: we should fill this one up completely, but it is straightforward + IL2ContractDeployer.ForceDeployment[] memory baseForceDeployments = new IL2ContractDeployer.ForceDeployment[]( + 0 + ); + address ctmDeployer = addresses.bridgehub.ctmDeploymentTrackerProxy; + + GatewayUpgradeEncodedInput memory gateUpgradeInput = GatewayUpgradeEncodedInput({ + baseForceDeployments: baseForceDeployments, + ctmDeployer: ctmDeployer, + fixedForceDeploymentsData: generatedData.forceDeploymentsData, + l2GatewayUpgrade: addresses.expectedL2Addresses.expectedL2GatewayUpgrade, + oldValidatorTimelock: 
config.contracts.oldValidatorTimelock, + newValidatorTimelock: addresses.validatorTimelock + }); + + bytes memory postUpgradeCalldata = abi.encode(gateUpgradeInput); + + ProposedUpgrade memory proposedUpgrade = ProposedUpgrade({ + l2ProtocolUpgradeTx: _composeUpgradeTx(), + factoryDeps: new bytes[](0), + bootloaderHash: config.contracts.bootloaderHash, + defaultAccountHash: config.contracts.defaultAAHash, + verifier: addresses.stateTransition.verifier, + verifierParams: verifierParams, + l1ContractsUpgradeCalldata: new bytes(0), + postUpgradeCalldata: postUpgradeCalldata, + // FIXME: TBH, I am not sure if even should even put any time there, + // but we may + upgradeTimestamp: 0, + newProtocolVersion: getNewProtocolVersion() + }); + + upgradeCutData = Diamond.DiamondCutData({ + facetCuts: facetCuts, + initAddress: addresses.gatewayUpgrade, + initCalldata: abi.encodeCall(GatewayUpgrade.upgrade, (proposedUpgrade)) + }); + } + + function getStage2UpgradeCalls() public returns (Call[] memory calls) { + calls = new Call[](9); + + // We need to firstly update all the contracts + calls[0] = Call({ + target: config.contracts.transparentProxyAdmin, + data: abi.encodeCall( + ProxyAdmin.upgrade, + ( + ITransparentUpgradeableProxy(payable(config.contracts.stateTransitionManagerAddress)), + addresses.stateTransition.chainTypeManagerImplementation + ) + ), + value: 0 + }); + calls[1] = Call({ + target: config.contracts.transparentProxyAdmin, + data: abi.encodeCall( + ProxyAdmin.upgradeAndCall, + ( + ITransparentUpgradeableProxy(payable(config.contracts.bridgehubProxyAddress)), + addresses.bridgehub.bridgehubImplementation, + abi.encodeCall(Bridgehub.initializeV2, ()) + ) + ), + value: 0 + }); + calls[2] = Call({ + target: config.contracts.transparentProxyAdmin, + data: abi.encodeCall( + ProxyAdmin.upgrade, + ( + ITransparentUpgradeableProxy(payable(config.contracts.oldSharedBridgeProxyAddress)), + addresses.bridges.l1NullifierImplementation + ) + ), + value: 0 + }); + calls[3] = 
Call({ + target: config.contracts.transparentProxyAdmin, + data: abi.encodeCall( + ProxyAdmin.upgrade, + ( + ITransparentUpgradeableProxy(payable(config.contracts.legacyErc20BridgeAddress)), + addresses.bridges.erc20BridgeImplementation + ) + ), + value: 0 + }); + + // Now, updating chain creation params + calls[4] = Call({ + target: config.contracts.stateTransitionManagerAddress, + data: abi.encodeCall(ChainTypeManager.setChainCreationParams, (prepareNewChainCreationParams())), + value: 0 + }); + calls[5] = Call({ + target: config.contracts.stateTransitionManagerAddress, + data: abi.encodeCall(ChainTypeManager.setValidatorTimelock, (addresses.validatorTimelock)), + value: 0 + }); + + // Now, we need to update the bridgehub + calls[6] = Call({ + target: config.contracts.bridgehubProxyAddress, + data: abi.encodeCall( + Bridgehub.setAddresses, + ( + addresses.bridges.sharedBridgeProxy, + CTMDeploymentTracker(addresses.bridgehub.ctmDeploymentTrackerProxy), + MessageRoot(addresses.bridgehub.messageRootProxy) + ) + ), + value: 0 + }); + + // Setting the necessary params for the L1Nullifier contract + calls[7] = Call({ + target: config.contracts.oldSharedBridgeProxyAddress, + data: abi.encodeCall( + L1Nullifier.setL1NativeTokenVault, + (L1NativeTokenVault(payable(addresses.vaults.l1NativeTokenVaultProxy))) + ), + value: 0 + }); + calls[8] = Call({ + target: config.contracts.oldSharedBridgeProxyAddress, + data: abi.encodeCall(L1Nullifier.setL1AssetRouter, (addresses.bridges.sharedBridgeProxy)), + value: 0 + }); + } + + function initializeConfig(string memory configPath) internal { + string memory toml = vm.readFile(configPath); + + config.l1ChainId = block.chainid; + config.deployerAddress = msg.sender; + + // Config file must be parsed key by key, otherwise values returned + // are parsed alfabetically and not by key. 
+ // https://book.getfoundry.sh/cheatcodes/parse-toml + config.eraChainId = toml.readUint("$.era_chain_id"); + config.ownerAddress = toml.readAddress("$.owner_address"); + config.testnetVerifier = toml.readBool("$.testnet_verifier"); + + config.contracts.maxNumberOfChains = toml.readUint("$.contracts.max_number_of_chains"); + config.contracts.create2FactorySalt = toml.readBytes32("$.contracts.create2_factory_salt"); + if (vm.keyExistsToml(toml, "$.contracts.create2_factory_addr")) { + config.contracts.create2FactoryAddr = toml.readAddress("$.contracts.create2_factory_addr"); + } + config.contracts.validatorTimelockExecutionDelay = toml.readUint( + "$.contracts.validator_timelock_execution_delay" + ); + config.contracts.genesisRoot = toml.readBytes32("$.contracts.genesis_root"); + config.contracts.genesisRollupLeafIndex = toml.readUint("$.contracts.genesis_rollup_leaf_index"); + config.contracts.genesisBatchCommitment = toml.readBytes32("$.contracts.genesis_batch_commitment"); + config.contracts.latestProtocolVersion = toml.readUint("$.contracts.latest_protocol_version"); + config.contracts.recursionNodeLevelVkHash = toml.readBytes32("$.contracts.recursion_node_level_vk_hash"); + config.contracts.recursionLeafLevelVkHash = toml.readBytes32("$.contracts.recursion_leaf_level_vk_hash"); + config.contracts.recursionCircuitsSetVksHash = toml.readBytes32("$.contracts.recursion_circuits_set_vks_hash"); + config.contracts.priorityTxMaxGasLimit = toml.readUint("$.contracts.priority_tx_max_gas_limit"); + config.contracts.diamondInitPubdataPricingMode = PubdataPricingMode( + toml.readUint("$.contracts.diamond_init_pubdata_pricing_mode") + ); + config.contracts.diamondInitBatchOverheadL1Gas = toml.readUint( + "$.contracts.diamond_init_batch_overhead_l1_gas" + ); + config.contracts.diamondInitMaxPubdataPerBatch = toml.readUint( + "$.contracts.diamond_init_max_pubdata_per_batch" + ); + config.contracts.diamondInitMaxL2GasPerBatch = 
toml.readUint("$.contracts.diamond_init_max_l2_gas_per_batch"); + config.contracts.diamondInitPriorityTxMaxPubdata = toml.readUint( + "$.contracts.diamond_init_priority_tx_max_pubdata" + ); + config.contracts.diamondInitMinimalL2GasPrice = toml.readUint("$.contracts.diamond_init_minimal_l2_gas_price"); + config.contracts.defaultAAHash = toml.readBytes32("$.contracts.default_aa_hash"); + config.contracts.bootloaderHash = toml.readBytes32("$.contracts.bootloader_hash"); + + config.contracts.stateTransitionManagerAddress = toml.readAddress( + "$.contracts.state_transition_manager_address" + ); + config.contracts.bridgehubProxyAddress = toml.readAddress("$.contracts.bridgehub_proxy_address"); + config.contracts.oldSharedBridgeProxyAddress = toml.readAddress("$.contracts.old_shared_bridge_proxy_address"); + config.contracts.transparentProxyAdmin = toml.readAddress("$.contracts.transparent_proxy_admin"); + config.contracts.eraDiamondProxy = toml.readAddress("$.contracts.era_diamond_proxy"); + config.contracts.legacyErc20BridgeAddress = toml.readAddress("$.contracts.legacy_erc20_bridge_address"); + config.contracts.oldValidatorTimelock = toml.readAddress("$.contracts.old_validator_timelock"); + // FIXME: value stored there is incorrect at the moment, figure out the correct value + config.contracts.blobVersionedHashRetriever = toml.readAddress("$.contracts.blob_versioned_hash_retriever"); + config.contracts.l2BridgeProxyOwnerAddress = toml.readAddress("$.contracts.l2_bridge_proxy_owner_address"); + config.contracts.l2BridgedStandardERC20ProxyOwnerAddress = toml.readAddress( + "$.contracts.l2_bridged_standard_erc20_proxy_owner_address" + ); + + config.tokens.tokenWethAddress = toml.readAddress("$.tokens.token_weth_address"); + } + + function initializeGeneratedData() internal { + generatedData.forceDeploymentsData = prepareForceDeploymentsData(); + } + + function initializeExpectedL2Addresses() internal { + address aliasedGovernance = 
AddressAliasHelper.applyL1ToL2Alias(config.ownerAddress); + + address expectedL2ProxyAdminDeployer = Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readProxyAdminDeployerBytecode()), + abi.encode(aliasedGovernance) + ); + address expectedL2ProxyAdmin = L2ContractHelper.computeCreate2Address( + expectedL2ProxyAdminDeployer, + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readProxyAdminBytecode()), + keccak256(hex"") + ); + + address permanentRestrictionImpl = Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readPermanentRestrictionBytecode()), + // Note that for L2 deployments the L2AdminFactory is 0. + abi.encode(L2_BRIDGEHUB_ADDRESS, address(0)) + ); + + address permanentRestrictionProxy = Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readTransparentUpgradeableProxyBytecode()), + abi.encode( + permanentRestrictionImpl, + expectedL2ProxyAdmin, + abi.encodeCall(PermanentRestriction.initialize, (aliasedGovernance)) + ) + ); + + address[] memory requiredL2Restrictions = new address[](1); + requiredL2Restrictions[0] = permanentRestrictionProxy; + + addresses.expectedL2Addresses = ExpectedL2Addresses({ + expectedRollupL2DAValidator: Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readRollupL2DAValidatorBytecode()), + hex"" + ), + expectedValidiumL2DAValidator: Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readValidiumL2DAValidatorBytecode()), + hex"" + ), + expectedL2GatewayUpgrade: Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readGatewayUpgradeBytecode()), + hex"" + ), + l2SharedBridgeLegacyImpl: Utils.getL2AddressViaCreate2Factory( + bytes32(0), + 
L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readL2LegacySharedBridgeBytecode()), + hex"" + ), + l2BridgedStandardERC20Impl: Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readStandardERC20Bytecode()), + hex"" + ), + expectedL2ProxyAdminDeployer: expectedL2ProxyAdminDeployer, + expectedL2ProxyAdmin: expectedL2ProxyAdmin, + expectedL2AdminFactory: Utils.getL2AddressViaCreate2Factory( + bytes32(0), + L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readL2AdminFactoryBytecode()), + abi.encode(requiredL2Restrictions) + ), + expectedL2PermanentRestrictionImpl: permanentRestrictionImpl, + expectedL2PermanentRestrictionProxy: permanentRestrictionProxy + }); + } + + function instantiateCreate2Factory() internal { + address contractAddress; + + bool isDeterministicDeployed = DETERMINISTIC_CREATE2_ADDRESS.code.length > 0; + bool isConfigured = config.contracts.create2FactoryAddr != address(0); + + if (isConfigured) { + if (config.contracts.create2FactoryAddr.code.length == 0) { + revert AddressHasNoCode(config.contracts.create2FactoryAddr); + } + contractAddress = config.contracts.create2FactoryAddr; + console.log("Using configured Create2Factory address:", contractAddress); + } else if (isDeterministicDeployed) { + contractAddress = DETERMINISTIC_CREATE2_ADDRESS; + console.log("Using deterministic Create2Factory address:", contractAddress); + } else { + contractAddress = Utils.deployCreate2Factory(); + console.log("Create2Factory deployed at:", contractAddress); + } + + addresses.create2Factory = contractAddress; + } + + function deployVerifier() internal { + bytes memory code; + if (config.testnetVerifier) { + code = type(TestnetVerifier).creationCode; + } else { + code = type(Verifier).creationCode; + } + address contractAddress = deployViaCreate2(code); + console.log("Verifier deployed at:", contractAddress); + addresses.stateTransition.verifier = contractAddress; + } + + function 
deployDefaultUpgrade() internal { + address contractAddress = deployViaCreate2(type(DefaultUpgrade).creationCode); + console.log("DefaultUpgrade deployed at:", contractAddress); + addresses.stateTransition.defaultUpgrade = contractAddress; + } + + function deployGenesisUpgrade() internal { + bytes memory bytecode = abi.encodePacked(type(L1GenesisUpgrade).creationCode); + address contractAddress = deployViaCreate2(bytecode); + console.log("GenesisUpgrade deployed at:", contractAddress); + addresses.stateTransition.genesisUpgrade = contractAddress; + } + + function deployGatewayUpgrade() internal { + bytes memory bytecode = abi.encodePacked(type(GatewayUpgrade).creationCode); + address contractAddress = deployViaCreate2(bytecode); + console.log("GatewayUpgrade deployed at:", contractAddress); + addresses.gatewayUpgrade = contractAddress; + } + + function deployDAValidators() internal { + address contractAddress = deployViaCreate2(Utils.readRollupDAValidatorBytecode()); + console.log("L1RollupDAValidator deployed at:", contractAddress); + addresses.daAddresses.l1RollupDAValidator = contractAddress; + + contractAddress = deployViaCreate2(type(ValidiumL1DAValidator).creationCode); + console.log("L1ValidiumDAValidator deployed at:", contractAddress); + addresses.daAddresses.l1ValidiumDAValidator = contractAddress; + } + + function deployPermanentRollupRestriction() internal { + bytes memory bytecode = abi.encodePacked( + type(PermanentRestriction).creationCode, + abi.encode(config.contracts.bridgehubProxyAddress, addresses.expectedL2Addresses.expectedL2AdminFactory) + ); + address implementationAddress = deployViaCreate2(bytecode); + + bytes memory proxyBytecode = abi.encodePacked( + type(TransparentUpgradeableProxy).creationCode, + abi.encode( + implementationAddress, + config.contracts.transparentProxyAdmin, + abi.encodeCall(PermanentRestriction.initialize, (config.deployerAddress)) + ) + ); + + address proxyAddress = deployViaCreate2(proxyBytecode); + 
addresses.permanentRollupRestriction = proxyAddress; + // FIXME: supply restrictions + } + + function deployValidatorTimelock() internal { + uint32 executionDelay = uint32(config.contracts.validatorTimelockExecutionDelay); + bytes memory bytecode = abi.encodePacked( + type(ValidatorTimelock).creationCode, + abi.encode(config.deployerAddress, executionDelay, config.eraChainId) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("ValidatorTimelock deployed at:", contractAddress); + addresses.validatorTimelock = contractAddress; + } + + function deployChainAdmin() internal { + bytes memory accessControlRestrictionBytecode = abi.encodePacked( + type(AccessControlRestriction).creationCode, + abi.encode(uint256(0), config.ownerAddress) + ); + + address accessControlRestriction = deployViaCreate2(accessControlRestrictionBytecode); + console.log("Access control restriction deployed at:", accessControlRestriction); + address[] memory restrictions = new address[](1); + restrictions[0] = accessControlRestriction; + addresses.accessControlRestrictionAddress = accessControlRestriction; + + bytes memory bytecode = abi.encodePacked(type(ChainAdmin).creationCode, abi.encode(restrictions)); + address contractAddress = deployViaCreate2(bytecode); + console.log("ChainAdmin deployed at:", contractAddress); + addresses.chainAdmin = contractAddress; + } + + function deployBridgehubImplementation() internal { + bytes memory bridgeHubBytecode = abi.encodePacked( + type(Bridgehub).creationCode, + abi.encode(config.l1ChainId, config.ownerAddress, (config.contracts.maxNumberOfChains)) + ); + address bridgehubImplementation = deployViaCreate2(bridgeHubBytecode); + console.log("Bridgehub Implementation deployed at:", bridgehubImplementation); + addresses.bridgehub.bridgehubImplementation = bridgehubImplementation; + } + + function deployMessageRootContract() internal { + bytes memory messageRootBytecode = abi.encodePacked( + type(MessageRoot).creationCode, + 
abi.encode(config.contracts.bridgehubProxyAddress) + ); + address messageRootImplementation = deployViaCreate2(messageRootBytecode); + console.log("MessageRoot Implementation deployed at:", messageRootImplementation); + addresses.bridgehub.messageRootImplementation = messageRootImplementation; + + bytes memory bytecode = abi.encodePacked( + type(TransparentUpgradeableProxy).creationCode, + abi.encode( + messageRootImplementation, + config.contracts.transparentProxyAdmin, + abi.encodeCall(MessageRoot.initialize, ()) + ) + ); + address messageRootProxy = deployViaCreate2(bytecode); + console.log("Message Root Proxy deployed at:", messageRootProxy); + addresses.bridgehub.messageRootProxy = messageRootProxy; + } + + function deployCTMDeploymentTracker() internal { + bytes memory ctmDTBytecode = abi.encodePacked( + type(CTMDeploymentTracker).creationCode, + abi.encode(config.contracts.bridgehubProxyAddress, addresses.bridges.sharedBridgeProxy) + ); + address ctmDTImplementation = deployViaCreate2(ctmDTBytecode); + console.log("CTM Deployment Tracker Implementation deployed at:", ctmDTImplementation); + addresses.bridgehub.ctmDeploymentTrackerImplementation = ctmDTImplementation; + + bytes memory bytecode = abi.encodePacked( + type(TransparentUpgradeableProxy).creationCode, + abi.encode( + ctmDTImplementation, + config.contracts.transparentProxyAdmin, + abi.encodeCall(CTMDeploymentTracker.initialize, (config.deployerAddress)) + ) + ); + address ctmDTProxy = deployViaCreate2(bytecode); + console.log("CTM Deployment Tracker Proxy deployed at:", ctmDTProxy); + addresses.bridgehub.ctmDeploymentTrackerProxy = ctmDTProxy; + } + + function deployChainTypeManagerContract() internal { + deployStateTransitionDiamondFacets(); + deployChainTypeManagerImplementation(); + // registerChainTypeManager(); + } + + function deployStateTransitionDiamondFacets() internal { + address executorFacet = deployViaCreate2(type(ExecutorFacet).creationCode); + console.log("ExecutorFacet deployed 
at:", executorFacet); + addresses.stateTransition.executorFacet = executorFacet; + + address adminFacet = deployViaCreate2( + abi.encodePacked(type(AdminFacet).creationCode, abi.encode(config.l1ChainId)) + ); + console.log("AdminFacet deployed at:", adminFacet); + addresses.stateTransition.adminFacet = adminFacet; + + address mailboxFacet = deployViaCreate2( + abi.encodePacked(type(MailboxFacet).creationCode, abi.encode(config.eraChainId, config.l1ChainId)) + ); + console.log("MailboxFacet deployed at:", mailboxFacet); + addresses.stateTransition.mailboxFacet = mailboxFacet; + + address gettersFacet = deployViaCreate2(type(GettersFacet).creationCode); + console.log("GettersFacet deployed at:", gettersFacet); + addresses.stateTransition.gettersFacet = gettersFacet; + + address diamondInit = deployViaCreate2(type(DiamondInit).creationCode); + console.log("DiamondInit deployed at:", diamondInit); + addresses.stateTransition.diamondInit = diamondInit; + } + + function deployChainTypeManagerImplementation() internal { + bytes memory bytecode = abi.encodePacked( + type(ChainTypeManager).creationCode, + abi.encode(config.contracts.bridgehubProxyAddress) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("ChainTypeManagerImplementation deployed at:", contractAddress); + addresses.stateTransition.chainTypeManagerImplementation = contractAddress; + } + + function setChainTypeManagerInValidatorTimelock() internal { + ValidatorTimelock validatorTimelock = ValidatorTimelock(addresses.validatorTimelock); + vm.broadcast(msg.sender); + validatorTimelock.setChainTypeManager(IChainTypeManager(config.contracts.stateTransitionManagerAddress)); + console.log("ChainTypeManager set in ValidatorTimelock"); + } + + function deploySharedBridgeContracts() internal { + deploySharedBridgeImplementation(); + deploySharedBridgeProxy(); + setL1LegacyBridge(); + } + + function deployL1NullifierContracts() internal { + deployL1NullifierImplementation(); + } + + function 
deployL1NullifierImplementation() internal { + // TODO(EVM-743): allow non-dev nullifier in the local deployment + bytes memory bytecode = abi.encodePacked( + type(L1NullifierDev).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode(config.contracts.bridgehubProxyAddress, config.eraChainId, config.contracts.eraDiamondProxy) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("L1NullifierImplementation deployed at:", contractAddress); + addresses.bridges.l1NullifierImplementation = contractAddress; + } + + function deploySharedBridgeImplementation() internal { + bytes memory bytecode = abi.encodePacked( + type(L1AssetRouter).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode( + config.tokens.tokenWethAddress, + config.contracts.bridgehubProxyAddress, + config.contracts.oldSharedBridgeProxyAddress, + config.eraChainId, + config.contracts.eraDiamondProxy + ) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("SharedBridgeImplementation deployed at:", contractAddress); + addresses.bridges.sharedBridgeImplementation = contractAddress; + } + + function deploySharedBridgeProxy() internal { + bytes memory initCalldata = abi.encodeCall(L1AssetRouter.initialize, (config.deployerAddress)); + bytes memory bytecode = abi.encodePacked( + type(TransparentUpgradeableProxy).creationCode, + abi.encode( + addresses.bridges.sharedBridgeImplementation, + config.contracts.transparentProxyAdmin, + initCalldata + ) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("SharedBridgeProxy deployed at:", contractAddress); + addresses.bridges.sharedBridgeProxy = contractAddress; + } + + function setL1LegacyBridge() internal { + vm.broadcast(msg.sender); + L1AssetRouter(addresses.bridges.sharedBridgeProxy).setL1Erc20Bridge( + L1ERC20Bridge(config.contracts.legacyErc20BridgeAddress) + ); + } + + function deployErc20BridgeImplementation() internal { + bytes memory bytecode 
= abi.encodePacked( + type(L1ERC20Bridge).creationCode, + abi.encode( + config.contracts.oldSharedBridgeProxyAddress, + addresses.bridges.sharedBridgeProxy, + addresses.vaults.l1NativeTokenVaultProxy, + config.eraChainId + ) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("Erc20BridgeImplementation deployed at:", contractAddress); + addresses.bridges.erc20BridgeImplementation = contractAddress; + } + + function deployBridgedStandardERC20Implementation() internal { + bytes memory bytecode = abi.encodePacked( + type(BridgedStandardERC20).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode() + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("BridgedStandardERC20Implementation deployed at:", contractAddress); + addresses.bridges.bridgedStandardERC20Implementation = contractAddress; + } + + function deployBridgedTokenBeacon() internal { + bytes memory bytecode = abi.encodePacked( + type(UpgradeableBeacon).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode(addresses.bridges.bridgedStandardERC20Implementation) + ); + UpgradeableBeacon beacon = new UpgradeableBeacon(addresses.bridges.bridgedStandardERC20Implementation); + address contractAddress = address(beacon); + beacon.transferOwnership(config.ownerAddress); + console.log("BridgedTokenBeacon deployed at:", contractAddress); + addresses.bridges.bridgedTokenBeacon = contractAddress; + } + + function deployL1NativeTokenVaultImplementation() internal { + bytes memory bytecode = abi.encodePacked( + type(L1NativeTokenVault).creationCode, + // solhint-disable-next-line func-named-parameters + abi.encode( + config.tokens.tokenWethAddress, + addresses.bridges.sharedBridgeProxy, + config.eraChainId, + config.contracts.oldSharedBridgeProxyAddress + ) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("L1NativeTokenVaultImplementation deployed at:", contractAddress); + 
addresses.vaults.l1NativeTokenVaultImplementation = contractAddress; + } + + function deployL1NativeTokenVaultProxy() internal { + bytes memory initCalldata = abi.encodeCall( + L1NativeTokenVault.initialize, + (config.ownerAddress, addresses.bridges.bridgedTokenBeacon) + ); + bytes memory bytecode = abi.encodePacked( + type(TransparentUpgradeableProxy).creationCode, + abi.encode( + addresses.vaults.l1NativeTokenVaultImplementation, + config.contracts.transparentProxyAdmin, + initCalldata + ) + ); + address contractAddress = deployViaCreate2(bytecode); + console.log("L1NativeTokenVaultProxy deployed at:", contractAddress); + addresses.vaults.l1NativeTokenVaultProxy = contractAddress; + + IL1AssetRouter sharedBridge = IL1AssetRouter(addresses.bridges.sharedBridgeProxy); + IL1Nullifier l1Nullifier = IL1Nullifier(config.contracts.oldSharedBridgeProxyAddress); + // Ownable ownable = Ownable(addresses.bridges.sharedBridgeProxy); + + vm.broadcast(msg.sender); + sharedBridge.setNativeTokenVault(INativeTokenVault(addresses.vaults.l1NativeTokenVaultProxy)); + vm.broadcast(msg.sender); + IL1NativeTokenVault(addresses.vaults.l1NativeTokenVaultProxy).registerEthToken(); + } + + function deployTransitionaryOwner() internal { + bytes memory bytecode = abi.encodePacked( + type(TransitionaryOwner).creationCode, + abi.encode(config.ownerAddress) + ); + + addresses.transitionaryOwner = deployViaCreate2(bytecode); + } + + function _moveGovernanceToOwner(address target) internal { + Ownable2StepUpgradeable(target).transferOwnership(addresses.transitionaryOwner); + TransitionaryOwner(addresses.transitionaryOwner).claimOwnershipAndGiveToGovernance(target); + } + + function updateOwners() internal { + vm.startBroadcast(msg.sender); + + // Note, that it will take some time for the governance to sign the "acceptOwnership" transaction, + // in order to avoid any possibility of the front-run, we will temporarily give the ownership to the + // contract that can only transfer ownership to the 
governance. + _moveGovernanceToOwner(addresses.validatorTimelock); + _moveGovernanceToOwner(addresses.bridges.sharedBridgeProxy); + _moveGovernanceToOwner(addresses.bridgehub.ctmDeploymentTrackerProxy); + _moveGovernanceToOwner(addresses.permanentRollupRestriction); + + vm.stopBroadcast(); + console.log("Owners updated"); + } + + function prepareNewChainCreationParams() internal returns (ChainCreationParams memory chainCreationParams) { + Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](4); + facetCuts[0] = Diamond.FacetCut({ + facet: addresses.stateTransition.adminFacet, + action: Diamond.Action.Add, + isFreezable: false, + selectors: Utils.getAllSelectors(addresses.stateTransition.adminFacet.code) + }); + facetCuts[1] = Diamond.FacetCut({ + facet: addresses.stateTransition.gettersFacet, + action: Diamond.Action.Add, + isFreezable: false, + selectors: Utils.getAllSelectors(addresses.stateTransition.gettersFacet.code) + }); + facetCuts[2] = Diamond.FacetCut({ + facet: addresses.stateTransition.mailboxFacet, + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getAllSelectors(addresses.stateTransition.mailboxFacet.code) + }); + facetCuts[3] = Diamond.FacetCut({ + facet: addresses.stateTransition.executorFacet, + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getAllSelectors(addresses.stateTransition.executorFacet.code) + }); + + VerifierParams memory verifierParams = VerifierParams({ + recursionNodeLevelVkHash: config.contracts.recursionNodeLevelVkHash, + recursionLeafLevelVkHash: config.contracts.recursionLeafLevelVkHash, + recursionCircuitsSetVksHash: config.contracts.recursionCircuitsSetVksHash + }); + + FeeParams memory feeParams = FeeParams({ + pubdataPricingMode: config.contracts.diamondInitPubdataPricingMode, + batchOverheadL1Gas: uint32(config.contracts.diamondInitBatchOverheadL1Gas), + maxPubdataPerBatch: uint32(config.contracts.diamondInitMaxPubdataPerBatch), + maxL2GasPerBatch: 
uint32(config.contracts.diamondInitMaxL2GasPerBatch), + priorityTxMaxPubdata: uint32(config.contracts.diamondInitPriorityTxMaxPubdata), + minimalL2GasPrice: uint64(config.contracts.diamondInitMinimalL2GasPrice) + }); + + DiamondInitializeDataNewChain memory initializeData = DiamondInitializeDataNewChain({ + dualVerifier: IVerifier(addresses.stateTransition.dualVerifier), + plonkVerifier: addresses.stateTransition.plonkVerifier, + fflonkVerifier: addresses.stateTransition.fflonkVerifier, + fflonkProofLength: config.contracts.fflonkProofLength, + verifierParams: verifierParams, + l2BootloaderBytecodeHash: config.contracts.bootloaderHash, + l2DefaultAccountBytecodeHash: config.contracts.defaultAAHash, + priorityTxMaxGasLimit: config.contracts.priorityTxMaxGasLimit, + feeParams: feeParams, + blobVersionedHashRetriever: config.contracts.blobVersionedHashRetriever + }); + + Diamond.DiamondCutData memory diamondCut = Diamond.DiamondCutData({ + facetCuts: facetCuts, + initAddress: addresses.stateTransition.diamondInit, + initCalldata: abi.encode(initializeData) + }); + + chainCreationParams = ChainCreationParams({ + genesisUpgrade: addresses.stateTransition.genesisUpgrade, + genesisBatchHash: config.contracts.genesisRoot, + genesisIndexRepeatedStorageChanges: uint64(config.contracts.genesisRollupLeafIndex), + genesisBatchCommitment: config.contracts.genesisBatchCommitment, + diamondCut: diamondCut, + forceDeploymentsData: generatedData.forceDeploymentsData + }); + } + + function saveOutput(string memory outputPath) internal { + vm.serializeAddress("bridgehub", "bridgehub_implementation_addr", addresses.bridgehub.bridgehubImplementation); + vm.serializeAddress( + "bridgehub", + "ctm_deployment_tracker_proxy_addr", + addresses.bridgehub.ctmDeploymentTrackerProxy + ); + vm.serializeAddress( + "bridgehub", + "ctm_deployment_tracker_implementation_addr", + addresses.bridgehub.ctmDeploymentTrackerImplementation + ); + vm.serializeAddress("bridgehub", "message_root_proxy_addr", 
addresses.bridgehub.messageRootProxy); + string memory bridgehub = vm.serializeAddress( + "bridgehub", + "message_root_implementation_addr", + addresses.bridgehub.messageRootImplementation + ); + + // TODO(EVM-744): this has to be renamed to chain type manager + vm.serializeAddress( + "state_transition", + "state_transition_implementation_addr", + addresses.stateTransition.chainTypeManagerImplementation + ); + vm.serializeAddress("state_transition", "verifier_addr", addresses.stateTransition.verifier); + vm.serializeAddress("state_transition", "admin_facet_addr", addresses.stateTransition.adminFacet); + vm.serializeAddress("state_transition", "mailbox_facet_addr", addresses.stateTransition.mailboxFacet); + vm.serializeAddress("state_transition", "executor_facet_addr", addresses.stateTransition.executorFacet); + vm.serializeAddress("state_transition", "getters_facet_addr", addresses.stateTransition.gettersFacet); + vm.serializeAddress("state_transition", "diamond_init_addr", addresses.stateTransition.diamondInit); + vm.serializeAddress("state_transition", "genesis_upgrade_addr", addresses.stateTransition.genesisUpgrade); + string memory stateTransition = vm.serializeAddress( + "state_transition", + "default_upgrade_addr", + addresses.stateTransition.defaultUpgrade + ); + + vm.serializeAddress("bridges", "erc20_bridge_implementation_addr", addresses.bridges.erc20BridgeImplementation); + vm.serializeAddress("bridges", "l1_nullifier_implementation_addr", addresses.bridges.l1NullifierImplementation); + vm.serializeAddress( + "bridges", + "shared_bridge_implementation_addr", + addresses.bridges.sharedBridgeImplementation + ); + string memory bridges = vm.serializeAddress( + "bridges", + "shared_bridge_proxy_addr", + addresses.bridges.sharedBridgeProxy + ); + + vm.serializeUint( + "contracts_config", + "diamond_init_max_l2_gas_per_batch", + config.contracts.diamondInitMaxL2GasPerBatch + ); + vm.serializeUint( + "contracts_config", + "diamond_init_batch_overhead_l1_gas", + 
config.contracts.diamondInitBatchOverheadL1Gas + ); + vm.serializeUint( + "contracts_config", + "diamond_init_max_pubdata_per_batch", + config.contracts.diamondInitMaxPubdataPerBatch + ); + vm.serializeUint( + "contracts_config", + "diamond_init_minimal_l2_gas_price", + config.contracts.diamondInitMinimalL2GasPrice + ); + vm.serializeUint( + "contracts_config", + "diamond_init_priority_tx_max_pubdata", + config.contracts.diamondInitPriorityTxMaxPubdata + ); + vm.serializeUint( + "contracts_config", + "diamond_init_pubdata_pricing_mode", + uint256(config.contracts.diamondInitPubdataPricingMode) + ); + vm.serializeUint("contracts_config", "priority_tx_max_gas_limit", config.contracts.priorityTxMaxGasLimit); + vm.serializeBytes32( + "contracts_config", + "recursion_circuits_set_vks_hash", + config.contracts.recursionCircuitsSetVksHash + ); + vm.serializeBytes32( + "contracts_config", + "recursion_leaf_level_vk_hash", + config.contracts.recursionLeafLevelVkHash + ); + vm.serializeBytes32( + "contracts_config", + "recursion_node_level_vk_hash", + config.contracts.recursionNodeLevelVkHash + ); + + vm.serializeAddress( + "contracts_config", + "expected_rollup_l2_da_validator", + addresses.expectedL2Addresses.expectedRollupL2DAValidator + ); + vm.serializeAddress( + "contracts_config", + "expected_validium_l2_da_validator", + addresses.expectedL2Addresses.expectedValidiumL2DAValidator + ); + vm.serializeAddress( + "contracts_config", + "expected_l2_gateway_upgrade", + addresses.expectedL2Addresses.expectedL2GatewayUpgrade + ); + vm.serializeBytes("contracts_config", "diamond_cut_data", generatedData.diamondCutData); + + string memory contractsConfig = vm.serializeBytes( + "contracts_config", + "force_deployments_data", + generatedData.forceDeploymentsData + ); + + vm.serializeAddress("deployed_addresses", "validator_timelock_addr", addresses.validatorTimelock); + vm.serializeAddress("deployed_addresses", "chain_admin", addresses.chainAdmin); + vm.serializeAddress( + 
"deployed_addresses", + "access_control_restriction_addr", + addresses.accessControlRestrictionAddress + ); + vm.serializeAddress("deployed_addresses", "permanent_rollup_restriction", addresses.permanentRollupRestriction); + vm.serializeString("deployed_addresses", "bridgehub", bridgehub); + vm.serializeString("deployed_addresses", "bridges", bridges); + vm.serializeString("deployed_addresses", "state_transition", stateTransition); + + vm.serializeAddress( + "deployed_addresses", + "rollup_l1_da_validator_addr", + addresses.daAddresses.l1RollupDAValidator + ); + vm.serializeAddress( + "deployed_addresses", + "validium_l1_da_validator_addr", + addresses.daAddresses.l1ValidiumDAValidator + ); + + string memory deployedAddresses = vm.serializeAddress( + "deployed_addresses", + "native_token_vault_addr", + addresses.vaults.l1NativeTokenVaultProxy + ); + + vm.serializeAddress("root", "create2_factory_addr", addresses.create2Factory); + vm.serializeBytes32("root", "create2_factory_salt", config.contracts.create2FactorySalt); + vm.serializeUint("root", "l1_chain_id", config.l1ChainId); + vm.serializeUint("root", "era_chain_id", config.eraChainId); + vm.serializeAddress("root", "deployer_addr", config.deployerAddress); + vm.serializeString("root", "deployed_addresses", deployedAddresses); + vm.serializeString("root", "contracts_config", contractsConfig); + string memory toml = vm.serializeAddress("root", "owner_address", config.ownerAddress); + + vm.writeToml(toml, outputPath); + } + + function deployViaCreate2(bytes memory _bytecode) internal returns (address) { + return Utils.deployViaCreate2(_bytecode, config.contracts.create2FactorySalt, addresses.create2Factory); + } + + function prepareForceDeploymentsData() internal view returns (bytes memory) { + require(config.ownerAddress != address(0), "owner not set"); + + FixedForceDeploymentsData memory data = FixedForceDeploymentsData({ + l1ChainId: config.l1ChainId, + eraChainId: config.eraChainId, + l1AssetRouter: 
addresses.bridges.sharedBridgeProxy, + l2TokenProxyBytecodeHash: L2ContractHelper.hashL2Bytecode( + L2ContractsBytecodesLib.readBeaconProxyBytecode() + ), + aliasedL1Governance: AddressAliasHelper.applyL1ToL2Alias(config.ownerAddress), + maxNumberOfZKChains: config.contracts.maxNumberOfChains, + bridgehubBytecodeHash: L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readBridgehubBytecode()), + l2AssetRouterBytecodeHash: L2ContractHelper.hashL2Bytecode( + L2ContractsBytecodesLib.readL2AssetRouterBytecode() + ), + l2NtvBytecodeHash: L2ContractHelper.hashL2Bytecode( + L2ContractsBytecodesLib.readL2NativeTokenVaultBytecode() + ), + messageRootBytecodeHash: L2ContractHelper.hashL2Bytecode(L2ContractsBytecodesLib.readMessageRootBytecode()), + l2SharedBridgeLegacyImpl: addresses.expectedL2Addresses.l2SharedBridgeLegacyImpl, + l2BridgedStandardERC20Impl: addresses.expectedL2Addresses.l2BridgedStandardERC20Impl, + l2BridgeProxyOwnerAddress: config.contracts.l2BridgeProxyOwnerAddress, + l2BridgedStandardERC20ProxyOwnerAddress: config.contracts.l2BridgedStandardERC20ProxyOwnerAddress + }); + + return abi.encode(data); + } + + // add this to be excluded from coverage report + function test() internal {} +} diff --git a/l1-contracts/foundry.toml b/l1-contracts/foundry.toml index 82910dad0..b4dc2ca3b 100644 --- a/l1-contracts/foundry.toml +++ b/l1-contracts/foundry.toml @@ -1,24 +1,42 @@ [profile.default] -src = 'contracts' -out = 'out' -libs = ['node_modules', 'lib'] -remappings = [ - "@openzeppelin/contracts/=lib/openzeppelin-contracts/contracts/", - "@openzeppelin/contracts-upgradeable/=lib/openzeppelin-contracts-upgradeable/contracts/", - "l2-contracts/=../l2-contracts/contracts/" -] allow_paths = ["../l2-contracts/contracts"] +src = "contracts" +out = "out" +libs = ["node_modules", "./lib", "../da-contracts/"] +cache_path = "cache-forge" +test = "test/foundry" +solc_version = "0.8.24" +evm_version = "cancun" fs_permissions = [ { access = "read", path = 
"../system-contracts/bootloader/build/artifacts" }, { access = "read", path = "../system-contracts/artifacts-zk/contracts-preprocessed" }, { access = "read", path = "../l2-contracts/artifacts-zk/" }, + { access = "read", path = "../l1-contracts/artifacts-zk/" }, + { access = "read", path = "../da-contracts/" }, + { access = "read", path = "../system-contracts/zkout/" }, { access = "read", path = "./script-config" }, { access = "read-write", path = "./script-out" }, - { access = "read", path = "./out" } + { access = "read", path = "./out" }, + { access = "read-write", path = "./test/foundry/l1/integration/deploy-scripts/script-config/" }, + { access = "read-write", path = "./test/foundry/l1/integration/deploy-scripts/script-out/" }, + { access = "read-write", path = "./test/foundry/l1/integration/upgrade-envs/script-config/" }, + { access = "read-write", path = "./test/foundry/l1/integration/upgrade-envs/script-out/" }, + { access = "read", path = "zkout" }, ] -cache_path = 'cache-forge' -test = 'test/foundry' -solc_version = "0.8.24" -evm_version = "cancun" - -# See more config options https://github.com/foundry-rs/foundry/tree/master/crates/config +ignored_error_codes = ["missing-receive-ether", "code-size"] +ignored_warnings_from = ["test", "contracts/dev-contracts"] +suppressed_warnings = ["txorigin"] +remappings = [ + "forge-std/=lib/forge-std/src/", + "murky/=lib/murky/src/", + "foundry-test/=test/foundry/", + "l2-contracts/=../l2-contracts/contracts/", + "da-contracts/=../da-contracts/contracts/", + "@openzeppelin/contracts-v4/=lib/openzeppelin-contracts-v4/contracts/", + "@openzeppelin/contracts-upgradeable-v4/=lib/openzeppelin-contracts-upgradeable-v4/contracts/", +] +optimizer = true +optimizer_runs = 200 +[profile.default.zksync] +enable_eravm_extensions = true +zksolc = "1.5.3" diff --git a/l1-contracts/fuzz/Verifier/Verifier.t.sol b/l1-contracts/fuzz/Verifier/Verifier.t.sol new file mode 100644 index 000000000..bac3f2e6f --- /dev/null +++ 
b/l1-contracts/fuzz/Verifier/Verifier.t.sol @@ -0,0 +1,86 @@ +pragma solidity ^0.8.20; +import {Test, console} from "forge-std/Test.sol"; +import {VerifierTest} from "solpp/dev-contracts/test/VerifierTest.sol"; +import {Verifier} from "solpp/zksync/Verifier.sol"; +import "@openzeppelin/contracts/utils/Strings.sol"; + +contract VerifierFuzzTest is Test { + uint256 Q_MOD = 21888242871839275222246405745257275088696311157297823662689037894645226208583; + uint256 R_MOD = 21888242871839275222246405745257275088548364400416034343698204186575808495617; + + uint256[] public publicInputs; + uint256[] public serializedProof; + uint256[] public recursiveAggregationInput; + + Verifier public verifier; + + function setUp() public virtual { + publicInputs.push(17257057577815541751225964212897374444694342989384539141520877492729); + + serializedProof.push(10032255692304426541958487424837706541667730769782503366592797609781788557424); + serializedProof.push(11856023086316274558845067687080284266010851703055534566998849536424959073766); + serializedProof.push(1946976494418613232642071265529572704802622739887191787991738703483400525159); + serializedProof.push(1328106069458824013351862477593422369726189688844441245167676630500797673929); + serializedProof.push(15488976127650523079605218040232167291115155239002840072043251018873550258833); + serializedProof.push(4352460820258659596860226525221943504756149602617718032378962471842121872064); + serializedProof.push(10499239305859992443759785453270906003243074359959242371675950941500942473773); + serializedProof.push(21347231097799123231227724221565041889687686131480556177475242020711996173235); + serializedProof.push(21448274562455512652922184359722637546669181231038098300951155169465175447933); + serializedProof.push(5224615512030263722410009061780530125927659699046094954022444377569738464640); + serializedProof.push(457781538876079938778845275495204146302569607395268192839148474821758081582); + 
serializedProof.push(18861735728246155975127314860333796285284072325207684293054713266899263027595); + serializedProof.push(16303944945368742900183889655415585360236645961122617249176044814801835577336); + serializedProof.push(13035945439947210396602249585896632733250124877036427100939804737514358838409); + serializedProof.push(5344210729159253547334947774998425118220137275601995670629358314205854915831); + serializedProof.push(5798533246034358556434877465898581616792677631188370022078168611592512620805); + serializedProof.push(17389657286129893116489015409587246992530648956814855147744210777822507444908); + serializedProof.push(2287244647342394712608648573347732257083870498255199596324312699868511383792); + serializedProof.push(4008043766112513713076111464601725311991199944328610186851424132679188418647); + serializedProof.push(1192776719848445147414966176395169615865534126881763324071908049917030138759); + serializedProof.push(21297794452895123333253856666749932934399762330444876027734824957603009458926); + serializedProof.push(17125994169200693606182326100834606153690416627082476471630567824088261322122); + serializedProof.push(13696978282153979214307382954559709118587582183649354744253374201589715565327); + serializedProof.push(19885518441500677676836488338931187143852666523909650686513498826535451677070); + serializedProof.push(1205434280320863211046275554464591162919269140938371417889032165323835178587); + serializedProof.push(17633172995805911347980792921300006225132501482343225088847242025756974009163); + serializedProof.push(16438080406761371143473961144300947125022788905488819913014533292593141026205); + serializedProof.push(5069081552536259237104332491140391551180511112980430307676595350165020188468); + serializedProof.push(21217317205917200275887696442048162383709998732382676029165079037795626916156); + serializedProof.push(19474466610515117278975027596198570980840609656738255347763182823792179771539); + 
serializedProof.push(9744176601826774967534277982058590459006781888895542911226406188087317156914); + serializedProof.push(13171230402193025939763214267878900142876558410430734782028402821166810894141); + serializedProof.push(11775403006142607980192261369108550982244126464568678337528680604943636677964); + serializedProof.push(6903612341636669639883555213872265187697278660090786759295896380793937349335); + serializedProof.push(10197105415769290664169006387603164525075746474380469980600306405504981186043); + serializedProof.push(10143152486514437388737642096964118742712576889537781270260677795662183637771); + serializedProof.push(7662095231333811948165764727904932118187491073896301295018543320499906824310); + serializedProof.push(929422796511992741418500336817719055655694499787310043166783539202506987065); + serializedProof.push(13837024938095280064325737989251964639823205065380219552242839155123572433059); + serializedProof.push(11738888513780631372636453609299803548810759208935038785934252961078387526204); + serializedProof.push(16528875312985292109940444015943812939751717229020635856725059316776921546668); + serializedProof.push(17525167117689648878398809303253004706004801107861280044640132822626802938868); + serializedProof.push(7419167499813234488108910149511390953153207250610705609008080038658070088540); + serializedProof.push(11628425014048216611195735618191126626331446742771562481735017471681943914146); + + verifier = new VerifierTest(); + } + + + function testFuzz_EllipticCurvePointAtInfinity_shouldRevert(uint8 index) public{ + vm.assume(index!=0 && index<43); + + uint256[] memory newSerializedProof = serializedProof; + string[] memory cmds = new string[](3); + cmds[0] = "python3"; + cmds[1] = "test/foundry/unit/fuzz/Verifier/randomVal.py"; + cmds[2] = Strings.toString(uint(index)); + bytes memory result = vm.ffi(cmds); + uint8[] memory values = abi.decode(result,(uint8[])); + for(uint i;i") .option("--only-verifier") .action(async (cmd) => { - const 
deployWallet = cmd.privateKey - ? new Wallet(cmd.privateKey, provider) - : Wallet.fromMnemonic( - process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, - "m/44'/60'/0'/0/1" - ).connect(provider); + let deployWallet: ethers.Wallet | ZkWallet; + + if (process.env.CONTRACTS_BASE_NETWORK_ZKSYNC === "true") { + const provider = new ZkProvider(web3Url()); + deployWallet = cmd.privateKey + ? new ZkWallet(cmd.privateKey, provider) + : ZkWallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider); + } else { + deployWallet = cmd.privateKey + ? new Wallet(cmd.privateKey, provider) + : Wallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider); + } + console.log(`Using deployer wallet: ${deployWallet.address}`); const ownerAddress = cmd.ownerAddress ? cmd.ownerAddress : deployWallet.address; @@ -55,6 +69,10 @@ async function main() { verbose: true, }); + if (deployer.isZkMode()) { + console.log("Deploying on a zkSync network!"); + } + await initialBridgehubDeployment(deployer, [], gasPrice, cmd.onlyVerifier, create2Salt, nonce); }); diff --git a/l1-contracts/scripts/display-governance.ts b/l1-contracts/scripts/display-governance.ts index d6b4846f1..4b6741386 100644 --- a/l1-contracts/scripts/display-governance.ts +++ b/l1-contracts/scripts/display-governance.ts @@ -10,10 +10,10 @@ import { applyL1ToL2Alias, getAddressFromEnv } from "../src.ts/utils"; import * as fs from "fs"; import { UpgradeableBeaconFactory } from "../../l2-contracts/typechain/UpgradeableBeaconFactory"; -import { Provider } from "zksync-web3"; +import { Provider } from "zksync-ethers"; const l2SharedBridgeABI = JSON.parse( - fs.readFileSync("../zksync/artifacts-zk/contracts/bridge/L2SharedBridge.sol/L2SharedBridge.json").toString() + fs.readFileSync("../zksync/artifacts-zk/contracts/bridge/L2AssetRouter.sol/L2SharedBridge.json").toString() 
).abi; async function getERC20BeaconAddress(l2SharedBridgeAddress: string) { diff --git a/l1-contracts/scripts/initialize-l2-weth-token.ts b/l1-contracts/scripts/initialize-l2-weth-token.ts index f38b1f8f8..7cb09f075 100644 --- a/l1-contracts/scripts/initialize-l2-weth-token.ts +++ b/l1-contracts/scripts/initialize-l2-weth-token.ts @@ -16,11 +16,11 @@ const provider = web3Provider(); const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, "etc/test_config/constant"); const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: "utf-8" })); -const contractArtifactsPath = path.join(process.env.ZKSYNC_HOME as string, "contracts/l2-contracts/artifacts-zk/"); +const contractArtifactsPath = path.join(process.env.ZKSYNC_HOME as string, "contracts/l1-contracts/artifacts-zk/"); const l2BridgeArtifactsPath = path.join(contractArtifactsPath, "contracts/bridge/"); const openzeppelinTransparentProxyArtifactsPath = path.join( contractArtifactsPath, - "@openzeppelin/contracts/proxy/transparent/" + "@openzeppelin/contracts-v4/proxy/transparent/" ); function readInterface(path: string, fileName: string, solFileName?: string) { @@ -34,7 +34,7 @@ const L2_WETH_INTERFACE = readInterface(l2BridgeArtifactsPath, "L2WrappedBaseTok const TRANSPARENT_UPGRADEABLE_PROXY = readInterface( openzeppelinTransparentProxyArtifactsPath, "ITransparentUpgradeableProxy", - "TransparentUpgradeableProxy" + "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol:TransparentUpgradeableProxy" ); function getL2Calldata(l2SharedBridgeAddress: string, l1WethTokenAddress: string, l2WethTokenImplAddress: string) { diff --git a/l1-contracts/scripts/migrate-governance.ts b/l1-contracts/scripts/migrate-governance.ts index f8f44a8b6..0c04a79c5 100644 --- a/l1-contracts/scripts/migrate-governance.ts +++ b/l1-contracts/scripts/migrate-governance.ts @@ -23,7 +23,7 @@ const priorityTxMaxGasLimit = BigNumber.from(getNumberFromEnv("CONTRACTS_PRIORIT const 
l2SharedBridgeABI = JSON.parse( fs - .readFileSync("../l2-contracts/artifacts-zk/contracts-preprocessed/bridge/L2SharedBridge.sol/L2SharedBridge.json") + .readFileSync("../l2-contracts/artifacts-zk/contracts-preprocessed/bridge/L2AssetRouter.sol/L2SharedBridge.json") .toString() ).abi; diff --git a/l1-contracts/scripts/register-hyperchain.ts b/l1-contracts/scripts/register-zk-chain.ts similarity index 80% rename from l1-contracts/scripts/register-hyperchain.ts rename to l1-contracts/scripts/register-zk-chain.ts index cee5eac01..b16f81fa8 100644 --- a/l1-contracts/scripts/register-hyperchain.ts +++ b/l1-contracts/scripts/register-zk-chain.ts @@ -8,7 +8,7 @@ import * as fs from "fs"; import * as path from "path"; import { Deployer } from "../src.ts/deploy"; import { GAS_MULTIPLIER, web3Provider } from "./utils"; -import { ADDRESS_ONE } from "../src.ts/utils"; +import { ADDRESS_ONE, encodeNTVAssetId, isCurrentNetworkLocal } from "../src.ts/utils"; import { getTokens } from "../src.ts/deploy-token"; const ETH_TOKEN_ADDRESS = ADDRESS_ONE; @@ -55,21 +55,18 @@ const chooseBaseTokenAddress = async (name?: string, address?: string) => { async function main() { const program = new Command(); - program.version("0.1.0").name("register-hyperchain").description("register hyperchains"); + program.version("0.1.0").name("register-zk-chain").description("register zk-chains"); program .option("--private-key ") - .option("--chain-id ") .option("--gas-price ") .option("--nonce ") .option("--governor-address ") - .option("--create2-salt ") - .option("--diamond-upgrade-init ") .option("--only-verifier") .option("--validium-mode") .option("--base-token-name ") .option("--base-token-address ") - .option("--use-governance ") + .option("--use-governance") .option("--token-multiplier-setter-address ") .action(async (cmd) => { const deployWallet = cmd.privateKey @@ -95,21 +92,33 @@ async function main() { deployWallet, ownerAddress, verbose: true, + l1ChainId: process.env.CONTRACTS_L1_CHAIN_ID 
|| "31337", }); const baseTokenAddress = await chooseBaseTokenAddress(cmd.baseTokenName, cmd.baseTokenAddress); await checkTokenAddress(baseTokenAddress); console.log(`Using base token address: ${baseTokenAddress}`); - - const useGovernance = !!cmd.useGovernance && cmd.useGovernance === "true"; - - if (!(await deployer.bridgehubContract(deployWallet).tokenIsRegistered(baseTokenAddress))) { - await deployer.registerToken(baseTokenAddress, useGovernance); + console.log(deployer.addresses.Bridgehub.BridgehubProxy); + const baseTokenAssetId = encodeNTVAssetId(deployer.l1ChainId, baseTokenAddress); + if (!(await deployer.bridgehubContract(deployWallet).assetIdIsRegistered(baseTokenAssetId))) { + await deployer.registerTokenBridgehub(baseTokenAddress, cmd.useGovernance); } + if (baseTokenAddress != ETH_TOKEN_ADDRESS) { + await deployer.registerTokenInNativeTokenVault(baseTokenAddress); + } + await deployer.registerZKChain( + baseTokenAssetId, + cmd.validiumMode, + null, + gasPrice, + true, + null, + null, + cmd.useGovernance, + isCurrentNetworkLocal() || cmd.localLegacyBridgeTesting + ); const tokenMultiplierSetterAddress = cmd.tokenMultiplierSetterAddress || ""; - - await deployer.registerHyperchain(baseTokenAddress, cmd.validiumMode, null, gasPrice, useGovernance); if (tokenMultiplierSetterAddress != "") { console.log(`Using token multiplier setter address: ${tokenMultiplierSetterAddress}`); await deployer.setTokenMultiplierSetterAddress(tokenMultiplierSetterAddress); diff --git a/l1-contracts/scripts/revert-reason.ts b/l1-contracts/scripts/revert-reason.ts index 2816f282b..2cd8eae83 100644 --- a/l1-contracts/scripts/revert-reason.ts +++ b/l1-contracts/scripts/revert-reason.ts @@ -7,10 +7,18 @@ import { Interface } from "ethers/lib/utils"; import { web3Url } from "./utils"; const erc20BridgeInterface = new Interface(hardhat.artifacts.readArtifactSync("L1ERC20Bridge").abi); -const zkSyncInterface = new Interface(hardhat.artifacts.readArtifactSync("IZkSync").abi); +const 
zkSyncInterface = new Interface(hardhat.artifacts.readArtifactSync("IZKChain").abi); const verifierInterface = new Interface(hardhat.artifacts.readArtifactSync("Verifier").abi); - -const interfaces = [erc20BridgeInterface, zkSyncInterface, verifierInterface]; +const bridgehubInterface = new Interface(hardhat.artifacts.readArtifactSync("Bridgehub").abi); +const sharedBridgeInterface = new Interface(hardhat.artifacts.readArtifactSync("L1SharedBridge").abi); + +const interfaces = [ + erc20BridgeInterface, + zkSyncInterface, + verifierInterface, + bridgehubInterface, + sharedBridgeInterface, +]; function decodeTransaction(contractInterface, tx) { try { diff --git a/l1-contracts/scripts/setup-legacy-bridge-era.ts b/l1-contracts/scripts/setup-legacy-bridge-era.ts index ffea4ef2d..37eeef9cc 100644 --- a/l1-contracts/scripts/setup-legacy-bridge-era.ts +++ b/l1-contracts/scripts/setup-legacy-bridge-era.ts @@ -15,8 +15,8 @@ import { web3Provider, GAS_MULTIPLIER } from "./utils"; import { deployedAddressesFromEnv } from "../src.ts/deploy-utils"; import { ethTestConfig, getAddressFromEnv } from "../src.ts/utils"; import { hashL2Bytecode } from "../../l2-contracts/src/utils"; -import { Provider } from "zksync-web3"; -import beaconProxy = require("../../l2-contracts/artifacts-zk/@openzeppelin/contracts/proxy/beacon/BeaconProxy.sol/BeaconProxy.json"); +import { Provider } from "zksync-ethers"; +import beaconProxy = require("../../l2-contracts/artifacts-zk/@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol/BeaconProxy.json"); const provider = web3Provider(); @@ -64,7 +64,9 @@ async function main() { await deployer.deploySharedBridgeImplementation(create2Salt, { nonce }); - const proxyAdminInterface = new Interface(hardhat.artifacts.readArtifactSync("ProxyAdmin").abi); + const proxyAdminInterface = new Interface( + hardhat.artifacts.readArtifactSync("@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol:ProxyAdmin").abi + ); let calldata = 
proxyAdminInterface.encodeFunctionData("upgrade(address,address)", [ deployer.addresses.Bridges.SharedBridgeProxy, deployer.addresses.Bridges.SharedBridgeImplementation, @@ -92,6 +94,7 @@ async function main() { ); const l2SharedBridgeAddress = getAddressFromEnv("CONTRACTS_L2_SHARED_BRIDGE_ADDR"); + const L2NativeTokenVaultAddress = getAddressFromEnv("CONTRACTS_L2_NATIVE_TOKEN_VAULT_PROXY_ADDR"); const l2TokenBytecodeHash = hashL2Bytecode(beaconProxy.bytecode); const l2Provider = new Provider(process.env.API_WEB3_JSON_RPC_HTTP_URL); // For the server to start up. @@ -102,12 +105,12 @@ async function main() { // Wait a bit more after the server is ready to ensure that all of its components are ready. await sleep(2); - const l2SharedBridge = new ethers.Contract( - l2SharedBridgeAddress, + const L2NativeTokenVault = new ethers.Contract( + L2NativeTokenVaultAddress, ["function l2TokenBeacon() view returns (address)"], l2Provider ); - const l2TokenBeacon = await l2SharedBridge.l2TokenBeacon(); + const l2TokenBeacon = await L2NativeTokenVault.l2TokenBeacon(); console.log("Retrieved storage values for TestERC20Bridge:"); console.log("l2SharedBridgeAddress:", l2SharedBridgeAddress); @@ -115,7 +118,8 @@ async function main() { console.log("l2TokenBytecodeHash:", ethers.utils.hexlify(l2TokenBytecodeHash)); // set storage values - const tx = await dummyBridge.setValues(l2SharedBridgeAddress, l2TokenBeacon, l2TokenBytecodeHash); + // FIXME(EVM-716): we provide the `L2NativeTokenVaultAddress` as the "shared bridge value" as it is only used for calculating of L2 token addresses. 
+ const tx = await dummyBridge.setValues(L2NativeTokenVaultAddress, l2TokenBeacon, l2TokenBytecodeHash); await tx.wait(); console.log("Set storage values for TestERC20Bridge"); diff --git a/l1-contracts/scripts/sync-layer.ts b/l1-contracts/scripts/sync-layer.ts new file mode 100644 index 000000000..b4e20f873 --- /dev/null +++ b/l1-contracts/scripts/sync-layer.ts @@ -0,0 +1,513 @@ +// hardhat import should be the first import in the file +// eslint-disable-next-line @typescript-eslint/no-unused-vars +import * as hardhat from "hardhat"; +import { Command } from "commander"; +import { Wallet, ethers } from "ethers"; +import { Deployer } from "../src.ts/deploy"; +import { formatUnits, parseUnits } from "ethers/lib/utils"; +import { web3Provider, GAS_MULTIPLIER, SYSTEM_CONFIG } from "./utils"; +import { deployedAddressesFromEnv } from "../src.ts/deploy-utils"; +import { initialBridgehubDeployment } from "../src.ts/deploy-process"; +import { + ethTestConfig, + getAddressFromEnv, + getNumberFromEnv, + ADDRESS_ONE, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + priorityTxMaxGasLimit, + L2_BRIDGEHUB_ADDRESS, + computeL2Create2Address, + DIAMOND_CUT_DATA_ABI_STRING, +} from "../src.ts/utils"; + +import { Wallet as ZkWallet, Provider as ZkProvider, utils as zkUtils } from "zksync-ethers"; +import { IChainTypeManagerFactory } from "../typechain/IChainTypeManagerFactory"; +import { IDiamondInitFactory } from "../typechain/IDiamondInitFactory"; +import { TestnetERC20TokenFactory } from "../typechain/TestnetERC20TokenFactory"; +import { BOOTLOADER_FORMAL_ADDRESS } from "zksync-ethers/build/utils"; + +const provider = web3Provider(); + +async function main() { + const program = new Command(); + + program.version("0.1.0").name("deploy").description("deploy L1 contracts"); + + program + .command("compute-migrated-chain-address") + .requiredOption("--chain-id ") + .option("--private-key ") + .action(async (cmd) => { + if (process.env.CONTRACTS_BASE_NETWORK_ZKSYNC !== "true") { + throw new 
Error("This script is only for zkSync network"); + } + + const provider = new ZkProvider(process.env.API_WEB3_JSON_RPC_HTTP_URL); + const ethProvider = new ethers.providers.JsonRpcProvider(process.env.ETH_CLIENT_WEB3_URL); + const deployWallet = cmd.privateKey + ? new ZkWallet(cmd.privateKey, provider) + : (ZkWallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider) as ethers.Wallet | ZkWallet); + + const deployer = new Deployer({ + deployWallet, + addresses: deployedAddressesFromEnv(), + verbose: true, + }); + + deployer.addresses.StateTransition.AdminFacet = getAddressFromEnv("GATEWAY_ADMIN_FACET_ADDR"); + deployer.addresses.StateTransition.MailboxFacet = getAddressFromEnv("GATEWAY_MAILBOX_FACET_ADDR"); + deployer.addresses.StateTransition.ExecutorFacet = getAddressFromEnv("GATEWAY_EXECUTOR_FACET_ADDR"); + deployer.addresses.StateTransition.GettersFacet = getAddressFromEnv("GATEWAY_GETTERS_FACET_ADDR"); + deployer.addresses.StateTransition.DiamondInit = getAddressFromEnv("GATEWAY_DIAMOND_INIT_ADDR"); + deployer.addresses.StateTransition.Verifier = getAddressFromEnv("GATEWAY_VERIFIER_ADDR"); + deployer.addresses.BlobVersionedHashRetriever = getAddressFromEnv("GATEWAY_BLOB_VERSIONED_HASH_RETRIEVER_ADDR"); + deployer.addresses.ValidatorTimeLock = getAddressFromEnv("GATEWAY_VALIDATOR_TIMELOCK_ADDR"); + deployer.addresses.Bridges.SharedBridgeProxy = getAddressFromEnv("CONTRACTS_L2_SHARED_BRIDGE_ADDR"); + deployer.addresses.StateTransition.StateTransitionProxy = getAddressFromEnv( + "GATEWAY_STATE_TRANSITION_PROXY_ADDR" + ); + + const stm = deployer.chainTypeManagerContract(provider); + const bridgehub = deployer.bridgehubContract(ethProvider); + const diamondInit = IDiamondInitFactory.connect(deployer.addresses.StateTransition.DiamondInit, provider); + const bytes32 = (x: ethers.BigNumberish) => ethers.utils.hexZeroPad(ethers.utils.hexlify(x), 32); + + const diamondCut = await 
deployer.initialZkSyncZKChainDiamondCut([], true); + const mandatoryInitData = [ + diamondInit.interface.getSighash("initialize"), + bytes32(parseInt(cmd.chainId)), + bytes32(getAddressFromEnv("GATEWAY_BRIDGEHUB_PROXY_ADDR")), + bytes32(deployer.addresses.StateTransition.StateTransitionProxy), + bytes32(await stm.protocolVersion()), + bytes32(deployer.deployWallet.address), + bytes32(deployer.addresses.ValidatorTimeLock), + await bridgehub.baseTokenAssetId(cmd.chainId), + bytes32(deployer.addresses.Bridges.SharedBridgeProxy), + await stm.storedBatchZero(), + ]; + + diamondCut.initCalldata = ethers.utils.hexConcat([...mandatoryInitData, diamondCut.initCalldata]); + const bytecode = hardhat.artifacts.readArtifactSync("DiamondProxy").bytecode; + const gatewayChainId = (await provider.getNetwork()).chainId; + const constructorData = new ethers.utils.AbiCoder().encode( + ["uint256", DIAMOND_CUT_DATA_ABI_STRING], + [gatewayChainId, diamondCut] + ); + + const address = computeL2Create2Address( + deployer.addresses.StateTransition.StateTransitionProxy, + bytecode, + constructorData, + ethers.constants.HashZero + ); + + console.log(address); + }); + + program + .command("deploy-sync-layer-contracts") + .option("--private-key ") + .option("--chain-id ") + .option("--gas-price ") + .option("--owner-address ") + .option("--create2-salt ") + .option("--diamond-upgrade-init ") + .option("--only-verifier") + .action(async (cmd) => { + if (process.env.CONTRACTS_BASE_NETWORK_ZKSYNC !== "true") { + throw new Error("This script is only for zkSync network"); + } + + const provider = new ZkProvider(process.env.API_WEB3_JSON_RPC_HTTP_URL); + const deployWallet = cmd.privateKey + ? new ZkWallet(cmd.privateKey, provider) + : (ZkWallet.fromMnemonic( + process.env.MNEMONIC ? 
process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider) as ethers.Wallet | ZkWallet); + + console.log(`Using deployer wallet: ${deployWallet.address}`); + + const ownerAddress = cmd.ownerAddress ? cmd.ownerAddress : deployWallet.address; + console.log(`Using owner address: ${ownerAddress}`); + + const gasPrice = cmd.gasPrice + ? parseUnits(cmd.gasPrice, "gwei") + : (await provider.getGasPrice()).mul(GAS_MULTIPLIER); + console.log(`Using gas price: ${formatUnits(gasPrice, "gwei")} gwei`); + + const nonce = await deployWallet.getTransactionCount(); + console.log(`Using nonce: ${nonce}`); + + const create2Salt = cmd.create2Salt ? cmd.create2Salt : ethers.utils.hexlify(ethers.utils.randomBytes(32)); + + const deployer = new Deployer({ + deployWallet, + addresses: deployedAddressesFromEnv(), + ownerAddress, + verbose: true, + }); + + if (deployer.isZkMode()) { + console.log("Deploying on a zkSync network!"); + } + deployer.addresses.Bridges.SharedBridgeProxy = getAddressFromEnv("CONTRACTS_L2_SHARED_BRIDGE_ADDR"); + + await initialBridgehubDeployment(deployer, [], gasPrice, true, create2Salt); + await initialBridgehubDeployment(deployer, [], gasPrice, false, create2Salt); + }); + + program + .command("register-sync-layer") + .option("--private-key ") + .option("--chain-id ") + .option("--gas-price ") + .option("--owner-address ") + .option("--create2-salt ") + .option("--diamond-upgrade-init ") + .option("--only-verifier") + .action(async (cmd) => { + // Now, all the operations are done on L1 + const deployWallet = cmd.privateKey + ? new Wallet(cmd.privateKey, provider) + : Wallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider); + + const ownerAddress = cmd.ownerAddress ? 
cmd.ownerAddress : deployWallet.address; + console.log(`Using owner address: ${ownerAddress}`); + const deployer = new Deployer({ + deployWallet, + addresses: deployedAddressesFromEnv(), + ownerAddress, + verbose: true, + }); + await registerSLContractsOnL1(deployer); + }); + + program + .command("migrate-to-sync-layer") + .option("--private-key ") + .option("--chain-id ") + .option("--gas-price ") + .option("--owner-address ") + .option("--create2-salt ") + .option("--diamond-upgrade-init ") + .option("--only-verifier") + .action(async (cmd) => { + console.log("Starting migration of the current chain to sync layer"); + + const deployWallet = cmd.privateKey + ? new Wallet(cmd.privateKey, provider) + : Wallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider); + const ownerAddress = cmd.ownerAddress ? cmd.ownerAddress : deployWallet.address; + + const deployer = new Deployer({ + deployWallet, + addresses: deployedAddressesFromEnv(), + ownerAddress, + verbose: true, + }); + + const gatewayChainId = getNumberFromEnv("GATEWAY_CHAIN_ID"); + const gasPrice = cmd.gasPrice + ? 
parseUnits(cmd.gasPrice, "gwei") + : (await provider.getGasPrice()).mul(GAS_MULTIPLIER); + + const currentChainId = getNumberFromEnv("CHAIN_ETH_ZKSYNC_NETWORK_ID"); + + const ctm = deployer.chainTypeManagerContract(deployer.deployWallet); + + const counterPart = getAddressFromEnv("GATEWAY_STATE_TRANSITION_PROXY_ADDR"); + + // FIXME: do it more gracefully + deployer.addresses.StateTransition.AdminFacet = getAddressFromEnv("GATEWAY_ADMIN_FACET_ADDR"); + deployer.addresses.StateTransition.MailboxFacet = getAddressFromEnv("GATEWAY_MAILBOX_FACET_ADDR"); + deployer.addresses.StateTransition.ExecutorFacet = getAddressFromEnv("GATEWAY_EXECUTOR_FACET_ADDR"); + deployer.addresses.StateTransition.GettersFacet = getAddressFromEnv("GATEWAY_GETTERS_FACET_ADDR"); + deployer.addresses.StateTransition.Verifier = getAddressFromEnv("GATEWAY_VERIFIER_ADDR"); + deployer.addresses.BlobVersionedHashRetriever = getAddressFromEnv("GATEWAY_BLOB_VERSIONED_HASH_RETRIEVER_ADDR"); + deployer.addresses.StateTransition.DiamondInit = getAddressFromEnv("GATEWAY_DIAMOND_INIT_ADDR"); + + const receipt = await deployer.moveChainToGateway(gatewayChainId, gasPrice); + + const gatewayAddress = await ctm.getZKChain(gatewayChainId); + + const l2TxHash = zkUtils.getL2HashFromPriorityOp(receipt, gatewayAddress); + + console.log("Hash of the transaction on SL chain: ", l2TxHash); + + const gatewayProvider = new ZkProvider(process.env.GATEWAY_API_WEB3_JSON_RPC_HTTP_URL); + + const txL2Handle = gatewayProvider.getL2TransactionFromPriorityOp( + await deployWallet.provider.getTransaction(receipt.transactionHash) + ); + + const receiptOnSL = await (await txL2Handle).wait(); + console.log("Finalized on SL with hash:", receiptOnSL.transactionHash); + + const ctmOnSL = IChainTypeManagerFactory.connect(counterPart, gatewayProvider); + const zkChainAddress = await ctmOnSL.getZKChain(currentChainId); + console.log(`CONTRACTS_DIAMOND_PROXY_ADDR=${zkChainAddress}`); + + console.log("Success!"); + }); + + program + 
.command("recover-from-failed-migration") + .option("--private-key ") + .option("--failed-tx-l2-hash ") + .option("--chain-id ") + .option("--gas-price ") + .option("--owner-address ") + .option("--create2-salt ") + .option("--diamond-upgrade-init ") + .option("--only-verifier") + .action(async (cmd) => { + const gatewayChainId = getNumberFromEnv("GATEWAY_CHAIN_ID"); + const gatewayProvider = new ZkProvider(process.env.GATEWAY_API_WEB3_JSON_RPC_HTTP_URL); + console.log("Obtaining proof..."); + const proof = await getTxFailureProof(gatewayProvider, cmd.failedTxL2Hash); + + const deployWallet = cmd.privateKey + ? new Wallet(cmd.privateKey, provider) + : Wallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider); + console.log(deployWallet.address); + const ownerAddress = cmd.ownerAddress ? cmd.ownerAddress : deployWallet.address; + const deployer = new Deployer({ + deployWallet, + addresses: deployedAddressesFromEnv(), + ownerAddress, + verbose: true, + }); + + const zkChain = deployer.stateTransitionContract(deployer.deployWallet); + + console.log(await zkChain.getAdmin()); + + console.log("Executing recovery..."); + + await ( + await zkChain.recoverFromFailedMigrationToGateway( + gatewayChainId, + proof.l2BatchNumber, + proof.l2MessageIndex, + proof.l2TxNumberInBatch, + proof.merkleProof + ) + ).wait(); + + console.log("Success!"); + }); + + program + .command("prepare-validators") + .option("--private-key ") + .option("--chain-id ") + .option("--gas-price ") + .option("--owner-address ") + .option("--create2-salt ") + .option("--diamond-upgrade-init ") + .option("--only-verifier") + .action(async (cmd) => { + const gatewayProvider = new ZkProvider(process.env.GATEWAY_API_WEB3_JSON_RPC_HTTP_URL); + const currentChainId = getNumberFromEnv("CHAIN_ETH_ZKSYNC_NETWORK_ID"); + + // Right now the new admin is the wallet itself. + const adminWallet = cmd.privateKey + ? 
new ZkWallet(cmd.privateKey, gatewayProvider) + : ZkWallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(gatewayProvider); + + const operators = [ + process.env.ETH_SENDER_SENDER_OPERATOR_COMMIT_ETH_ADDR, + process.env.ETH_SENDER_SENDER_OPERATOR_BLOBS_ETH_ADDR, + ]; + + const deployer = new Deployer({ + deployWallet: adminWallet, + addresses: deployedAddressesFromEnv(), + ownerAddress: adminWallet.address, + verbose: true, + }); + + console.log("Enabling validators"); + + // FIXME: do it in cleaner way + deployer.addresses.ValidatorTimeLock = getAddressFromEnv("GATEWAY_VALIDATOR_TIMELOCK_ADDR"); + const timelock = deployer.validatorTimelock(deployer.deployWallet); + + for (const operator of operators) { + if (await timelock.validators(currentChainId, operator)) { + continue; + } + + await deployer.deployWallet.sendTransaction({ + to: operator, + value: ethers.utils.parseEther("5"), + }); + + await (await timelock.addValidator(currentChainId, operator)).wait(); + } + + // FIXME: this method includes bridgehub manipulation, but in the future it won't. 
+ deployer.addresses.StateTransition.StateTransitionProxy = getAddressFromEnv( + "GATEWAY_STATE_TRANSITION_PROXY_ADDR" + ); + deployer.addresses.Bridgehub.BridgehubProxy = getAddressFromEnv("GATEWAY_BRIDGEHUB_PROXY_ADDR"); + + const zkChain = deployer.stateTransitionContract(deployer.deployWallet); + + console.log("Setting SL DA validators"); + // This logic should be distinctive between Validium and Rollup + const l1DaValidator = getAddressFromEnv("GATEWAY_L1_RELAYED_SL_DA_VALIDATOR"); + const l2DaValidator = getAddressFromEnv("CONTRACTS_L2_DA_VALIDATOR_ADDR"); + await (await zkChain.setDAValidatorPair(l1DaValidator, l2DaValidator)).wait(); + + console.log("Success!"); + }); + + await program.parseAsync(process.argv); +} + +async function registerSLContractsOnL1(deployer: Deployer) { + /// CTM asset info + /// l2Bridgehub in L1Bridghub + + const chainId = getNumberFromEnv("CHAIN_ETH_ZKSYNC_NETWORK_ID"); + + console.log(`Gateway chain Id: ${chainId}`); + + const l1Bridgehub = deployer.bridgehubContract(deployer.deployWallet); + const l1CTM = deployer.chainTypeManagerContract(deployer.deployWallet); + console.log(deployer.addresses.StateTransition.StateTransitionProxy); + const gatewayAddress = await l1Bridgehub.getZKChain(chainId); + // this script only works when owner is the deployer + console.log("Registering Gateway chain id on the CTM"); + const receipt1 = await deployer.executeUpgrade( + l1Bridgehub.address, + 0, + l1Bridgehub.interface.encodeFunctionData("registerSettlementLayer", [chainId, true]) + ); + + console.log("Registering Gateway as settlement layer on the L1", receipt1.transactionHash); + + const gasPrice = (await deployer.deployWallet.provider.getGasPrice()).mul(GAS_MULTIPLIER); + const value = ( + await l1Bridgehub.l2TransactionBaseCost(chainId, gasPrice, priorityTxMaxGasLimit, REQUIRED_L2_GAS_PRICE_PER_PUBDATA) + ).mul(10); + const baseTokenAddress = await l1Bridgehub.baseToken(chainId); + const ethIsBaseToken = baseTokenAddress == ADDRESS_ONE; 
+ if (!ethIsBaseToken) { + const baseToken = TestnetERC20TokenFactory.connect(baseTokenAddress, this.deployWallet); + await (await baseToken.transfer(this.addresses.Governance, value)).wait(); + await this.executeUpgrade( + baseTokenAddress, + 0, + baseToken.interface.encodeFunctionData("approve", [this.addresses.Bridges.SharedBridgeProxy, value.mul(2)]) + ); + } + const ctmDeploymentTracker = deployer.ctmDeploymentTracker(deployer.deployWallet); + const assetRouter = deployer.defaultSharedBridge(deployer.deployWallet); + const assetId = await l1Bridgehub.ctmAssetIdFromChainId(chainId); + + // Setting the L2 bridgehub as the counterpart for the CTM asset + const receipt2 = await deployer.executeUpgrade( + l1Bridgehub.address, + ethIsBaseToken ? value : 0, + l1Bridgehub.interface.encodeFunctionData("requestL2TransactionTwoBridges", [ + { + chainId, + mintValue: value, + l2Value: 0, + l2GasLimit: priorityTxMaxGasLimit, + l2GasPerPubdataByteLimit: SYSTEM_CONFIG.requiredL2GasPricePerPubdata, + refundRecipient: deployer.deployWallet.address, + secondBridgeAddress: assetRouter.address, + secondBridgeValue: 0, + secondBridgeCalldata: + "0x02" + + ethers.utils.defaultAbiCoder.encode(["bytes32", "address"], [assetId, L2_BRIDGEHUB_ADDRESS]).slice(2), + }, + ]) + ); + const l2TxHash = zkUtils.getL2HashFromPriorityOp(receipt2, gatewayAddress); + console.log("CTM asset registered in L2SharedBridge on SL tx hash: ", receipt2.transactionHash); + console.log("CTM asset registered in L2SharedBridge on SL l2 tx hash: ", l2TxHash); + + const l2CTMAddress = getAddressFromEnv("GATEWAY_STATE_TRANSITION_PROXY_ADDR"); + + // Whitelisting the CTM address on L2 + const receipt3 = await deployer.executeUpgradeOnL2( + chainId, + L2_BRIDGEHUB_ADDRESS, + gasPrice, + l1Bridgehub.interface.encodeFunctionData("addChainTypeManager", [l2CTMAddress]), + priorityTxMaxGasLimit + ); + const l2TxHash2dot5 = zkUtils.getL2HashFromPriorityOp(receipt3, gatewayAddress); + console.log(`L2 CTM ,l2 txHash: 
${l2TxHash2dot5}`); + console.log(`L2 CTM address ${l2CTMAddress} registered on gateway, txHash: ${receipt3.transactionHash}`); + + // Setting the corresponding CTM address on L2. + const receipt4 = await deployer.executeUpgrade( + l1Bridgehub.address, + value, + l1Bridgehub.interface.encodeFunctionData("requestL2TransactionTwoBridges", [ + { + chainId, + mintValue: value, + l2Value: 0, + l2GasLimit: priorityTxMaxGasLimit, + l2GasPerPubdataByteLimit: SYSTEM_CONFIG.requiredL2GasPricePerPubdata, + refundRecipient: deployer.deployWallet.address, + secondBridgeAddress: ctmDeploymentTracker.address, + secondBridgeValue: 0, + secondBridgeCalldata: + "0x01" + ethers.utils.defaultAbiCoder.encode(["address", "address"], [l1CTM.address, l2CTMAddress]).slice(2), + }, + ]) + ); + const l2TxHash3 = zkUtils.getL2HashFromPriorityOp(receipt4, gatewayAddress); + console.log("CTM asset registered in L2 Bridgehub on SL", receipt4.transactionHash); + console.log("CTM asset registered in L2 Bridgehub on SL l2TxHash", l2TxHash3); +} + +// TODO: maybe move it to SDK +async function getTxFailureProof(provider: ZkProvider, l2TxHash: string) { + const receipt = await provider.getTransactionReceipt(ethers.utils.hexlify(l2TxHash)); + const successL2ToL1LogIndex = receipt.l2ToL1Logs.findIndex( + (l2ToL1log) => l2ToL1log.sender == BOOTLOADER_FORMAL_ADDRESS && l2ToL1log.key == l2TxHash + ); + const successL2ToL1Log = receipt.l2ToL1Logs[successL2ToL1LogIndex]; + if (successL2ToL1Log.value != ethers.constants.HashZero) { + throw new Error("The tx was successful"); + } + + const proof = await provider.getLogProof(l2TxHash, successL2ToL1LogIndex); + return { + l2BatchNumber: receipt.l1BatchNumber, + l2MessageIndex: proof.id, + l2TxNumberInBatch: receipt.l1BatchTxIndex, + merkleProof: proof.proof, + }; +} + +main() + .then(() => process.exit(0)) + .catch((err) => { + console.error("Error:", err); + process.exit(1); + }); diff --git a/l1-contracts/scripts/token-migration.ts 
b/l1-contracts/scripts/token-migration.ts index b18260ca3..a3b4ef67d 100644 --- a/l1-contracts/scripts/token-migration.ts +++ b/l1-contracts/scripts/token-migration.ts @@ -197,7 +197,7 @@ async function prepareGovernanceTokenMigrationCall( delay: number ) { const governanceAbi = new ethers.utils.Interface((await hardhat.artifacts.readArtifact("IGovernance")).abi); - const sharedBridgeAbi = new ethers.utils.Interface((await hardhat.artifacts.readArtifact("L1SharedBridge")).abi); + const sharedBridgeAbi = new ethers.utils.Interface((await hardhat.artifacts.readArtifact("L1AssetRouter")).abi); const calls = tokens.map((token) => { const target = token == utils.ETH_ADDRESS_IN_CONTRACTS ? eraChainAddress : l1LegacyBridgeAddr; diff --git a/l1-contracts/scripts/upgrade-consistency-checker.ts b/l1-contracts/scripts/upgrade-consistency-checker.ts index 5da064a04..798f6f36a 100644 --- a/l1-contracts/scripts/upgrade-consistency-checker.ts +++ b/l1-contracts/scripts/upgrade-consistency-checker.ts @@ -10,16 +10,17 @@ import { BigNumber, ethers } from "ethers"; import { utils } from "zksync-ethers"; import type { FacetCut } from "../src.ts/diamondCut"; import { getCurrentFacetCutsForAdd } from "../src.ts/diamondCut"; +import { encodeNTVAssetId } from "../src.ts/utils"; // Things that still have to be manually double checked: // 1. Contracts must be verified. -// 2. Getter methods in STM. +// 2. Getter methods in CTM. 
// List the contracts that should become the upgrade targets const genesisUpgrade = process.env.CONTRACTS_GENESIS_UPGRADE_ADDR!; const validatorTimelockDeployTx = "0xde4ef2b77241b605acaa1658ff8815df0911bf81555a80c9cbdde42fbcaaea30"; const validatorTimelock = process.env.CONTRACTS_VALIDATOR_TIMELOCK_ADDR!; -const upgradeHyperchains = process.env.CONTRACTS_HYPERCHAIN_UPGRADE_ADDR!; +const upgradeZKChains = process.env.CONTRACTS_ZK_CHAIN_UPGRADE_ADDR!; const verifier = process.env.CONTRACTS_VERIFIER_ADDR!; const proxyAdmin = process.env.CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR!; @@ -35,10 +36,10 @@ const gettersFacet = process.env.CONTRACTS_GETTERS_FACET_ADDR!; const diamondInit = process.env.CONTRACTS_DIAMOND_INIT_ADDR!; -const stmImplDeployTx = "0xe01c0bb497017a25c92bfc712e370e8f900554b107fe0b6022976d05c349f2b6"; -const stmImpl = process.env.CONTRACTS_STATE_TRANSITION_IMPL_ADDR!; -const stmDeployTx = "0x514bbf46d227eee8567825bf5c8ee1855aa8a1916f7fee7b191e2e3d5ecba849"; -const stm = process.env.CONTRACTS_STATE_TRANSITION_PROXY_ADDR!; +const ctmImplDeployTx = "0xe01c0bb497017a25c92bfc712e370e8f900554b107fe0b6022976d05c349f2b6"; +const ctmImpl = process.env.CONTRACTS_STATE_TRANSITION_IMPL_ADDR!; +const ctmDeployTx = "0x514bbf46d227eee8567825bf5c8ee1855aa8a1916f7fee7b191e2e3d5ecba849"; +const ctm = process.env.CONTRACTS_STATE_TRANSITION_PROXY_ADDR!; const sharedBridgeImplDeployTx = "0x074204db79298c2f6beccae881c2ad7321c331e97fb4bd93adce2eb23bf17a17"; const sharedBridgeImpl = process.env.CONTRACTS_L1_SHARED_BRIDGE_IMPL_ADDR!; @@ -52,9 +53,10 @@ const initialOwner = "0x71d84c3404a6ae258E6471d4934B96a2033F9438"; const expectedOwner = "0x71d84c3404a6ae258E6471d4934B96a2033F9438"; //process.env.CONTRACTS_GOVERNANCE_ADDR!; const expectedDelay = "75600"; const eraChainId = process.env.CONTRACTS_ERA_CHAIN_ID!; +const l1ChainId = process.env.CONTRACTS_L1_CHAIN_ID!; const expectedSalt = "0x0000000000000000000000000000000000000000000000000000000000000001"; -const 
expectedHyperchainAddr = "0x32400084c286cf3e17e7b677ea9583e60a000324"; -const maxNumberOfHyperchains = 100; +const expectedZKChainAddr = "0x32400084c286cf3e17e7b677ea9583e60a000324"; +const maxNumberOfZKChains = 100; const expectedStoredBatchHashZero = "0x1574fa776dec8da2071e5f20d71840bfcbd82c2bca9ad68680edfedde1710bc4"; const expectedL2BridgeAddress = "0x11f943b2c77b743AB90f4A0Ae7d5A4e7FCA3E102"; const expectedL1LegacyBridge = "0x57891966931Eb4Bb6FB81430E6cE0A03AAbDe063"; @@ -112,7 +114,9 @@ async function extractInitCode(data: string) { async function extractProxyInitializationData(contract: ethers.Contract, data: string) { const initCode = await extractInitCode(data); - const artifact = await hardhat.artifacts.readArtifact("TransparentUpgradeableProxy"); + const artifact = await hardhat.artifacts.readArtifact( + "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol:TransparentUpgradeableProxy" + ); // Deployment tx is a concatenation of the init code and the constructor data // constructor has the following type `constructor(address _logic, address admin_, bytes memory _data)` @@ -274,7 +278,7 @@ async function extractProxyInitializationData(contract: ethers.Contract, data: s throw new Error("L2 default account bytecode hash is not correct"); } - console.log("STM init data correct!"); + console.log("CTM init data correct!"); } async function checkValidatorTimelock() { @@ -286,9 +290,9 @@ async function checkValidatorTimelock() { throw new Error("ValidatorTimelock owner is not correct"); } - const usedStm = await contract.stateTransitionManager(); - if (usedStm.toLowerCase() != stm.toLowerCase()) { - throw new Error("ValidatorTimelock stateTransitionManager is not correct"); + const usedCtm = await contract.chainTypeManager(); + if (usedCtm.toLowerCase() != ctm.toLowerCase()) { + throw new Error("ValidatorTimelock chainTypeManager is not correct"); } const validatorOneIsSet = await contract.validators(eraChainId, validatorOne); @@ -324,9 
+328,9 @@ async function checkBridgehub() { throw new Error("Bridgehub baseToken is not correct"); } - const hyperchain = await contract.getHyperchain(eraChainId); - if (hyperchain.toLowerCase() != expectedHyperchainAddr.toLowerCase()) { - throw new Error("Bridgehub hyperchain is not correct"); + const zkChain = await contract.getZKChain(eraChainId); + if (zkChain.toLowerCase() != expectedZKChainAddr.toLowerCase()) { + throw new Error("Bridgehub zkChain is not correct"); } const sharedBridge = await contract.sharedBridge(); @@ -334,17 +338,21 @@ async function checkBridgehub() { throw new Error("Bridgehub sharedBridge is not correct"); } - const usedSTM = await contract.stateTransitionManager(eraChainId); - if (usedSTM.toLowerCase() != stm.toLowerCase()) { - throw new Error("Bridgehub stateTransitionManager is not correct"); + const usedCTM = await contract.chainTypeManager(eraChainId); + if (usedCTM.toLowerCase() != ctm.toLowerCase()) { + throw new Error("Bridgehub chainTypeManager is not correct"); } - const isRegistered = await contract.stateTransitionManagerIsRegistered(usedSTM); + const isRegistered = await contract.chainTypeManagerIsRegistered(usedCTM); if (!isRegistered) { - throw new Error("Bridgehub stateTransitionManager is not registered"); + throw new Error("Bridgehub chainTypeManager is not registered"); } - const tokenIsRegistered = await contract.tokenIsRegistered(utils.ETH_ADDRESS_IN_CONTRACTS); + const baseTokenAssetId = encodeNTVAssetId( + parseInt(l1ChainId), + ethers.utils.hexZeroPad(utils.ETH_ADDRESS_IN_CONTRACTS, 32) + ); + const tokenIsRegistered = contract.assetIdIsRegistered(baseTokenAssetId); if (!tokenIsRegistered) { throw new Error("Bridgehub token is not registered"); } @@ -360,65 +368,65 @@ async function checkMailbox() { console.log("Mailbox is correct!"); } -async function checkSTMImpl() { - const artifact = await hardhat.artifacts.readArtifact("StateTransitionManager"); - const contract = new ethers.Contract(stmImpl, artifact.abi, 
l1Provider); +async function checkCTMImpl() { + const artifact = await hardhat.artifacts.readArtifact("ChainTypeManager"); + const contract = new ethers.Contract(ctmImpl, artifact.abi, l1Provider); - await checkCorrectInitCode(stmImplDeployTx, contract, artifact.bytecode, [bridgeHub, maxNumberOfHyperchains]); + await checkCorrectInitCode(ctmImplDeployTx, contract, artifact.bytecode, [bridgeHub, maxNumberOfZKChains]); - console.log("STM impl correct!"); + console.log("CTM impl correct!"); } -async function checkSTM() { - const artifact = await hardhat.artifacts.readArtifact("StateTransitionManager"); +async function checkCTM() { + const artifact = await hardhat.artifacts.readArtifact("ChainTypeManager"); - const contract = new ethers.Contract(stm, artifact.abi, l1Provider); + const contract = new ethers.Contract(ctm, artifact.abi, l1Provider); const usedBH = await contract.BRIDGE_HUB(); if (usedBH.toLowerCase() != bridgeHub.toLowerCase()) { - throw new Error("STM bridgeHub is not correct"); + throw new Error("CTM bridgeHub is not correct"); } - const usedMaxNumberOfHyperchains = (await contract.MAX_NUMBER_OF_HYPERCHAINS()).toNumber(); - if (usedMaxNumberOfHyperchains != maxNumberOfHyperchains) { - throw new Error("STM maxNumberOfHyperchains is not correct"); + const usedMaxNumberOfZKChains = (await contract.MAX_NUMBER_OF_ZK_CHAINS()).toNumber(); + if (usedMaxNumberOfZKChains != maxNumberOfZKChains) { + throw new Error("CTM maxNumberOfZKChains is not correct"); } const genUpgrade = await contract.genesisUpgrade(); if (genUpgrade.toLowerCase() != genesisUpgrade.toLowerCase()) { - throw new Error("STM genesisUpgrade is not correct"); + throw new Error("CTM genesisUpgrade is not correct"); } const storedBatchHashZero = await contract.storedBatchZero(); if (storedBatchHashZero.toLowerCase() != expectedStoredBatchHashZero.toLowerCase()) { - throw new Error("STM storedBatchHashZero is not correct"); + throw new Error("CTM storedBatchHashZero is not correct"); } const 
currentOwner = await contract.owner(); if (currentOwner.toLowerCase() != expectedOwner.toLowerCase()) { - throw new Error("STM owner is not correct"); + throw new Error("CTM owner is not correct"); } - console.log("STM is correct!"); + console.log("CTM is correct!"); - await extractProxyInitializationData(contract, (await l1Provider.getTransaction(stmDeployTx)).data); + await extractProxyInitializationData(contract, (await l1Provider.getTransaction(ctmDeployTx)).data); } -async function checkL1SharedBridgeImpl() { - const artifact = await hardhat.artifacts.readArtifact("L1SharedBridge"); +async function checkL1AssetRouterImpl() { + const artifact = await hardhat.artifacts.readArtifact("L1AssetRouter"); const contract = new ethers.Contract(sharedBridgeImpl, artifact.abi, l1Provider); await checkCorrectInitCode(sharedBridgeImplDeployTx, contract, artifact.bytecode, [ expectedL1WethAddress, bridgeHub, eraChainId, - expectedHyperchainAddr, + expectedZKChainAddr, ]); console.log("L1 shared bridge impl correct!"); } async function checkSharedBridge() { - const artifact = await hardhat.artifacts.readArtifact("L1SharedBridge"); + const artifact = await hardhat.artifacts.readArtifact("L1AssetRouter"); const contract = new ethers.Contract(sharedBridgeProxy, artifact.abi, l1Provider); const l2BridgeAddr = await contract.l2BridgeAddress(eraChainId); @@ -449,9 +457,11 @@ async function checkLegacyBridge() { } async function checkProxyAdmin() { - await checkIdenticalBytecode(proxyAdmin, "ProxyAdmin"); + await checkIdenticalBytecode(proxyAdmin, "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol:ProxyAdmin"); - const artifact = await hardhat.artifacts.readArtifact("ProxyAdmin"); + const artifact = await hardhat.artifacts.readArtifact( + "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol:ProxyAdmin" + ); const contract = new ethers.Contract(proxyAdmin, artifact.abi, l1Provider); const currentOwner = await contract.owner(); @@ -472,7 +482,7 @@ async function 
main() { program.action(async () => { await checkIdenticalBytecode(genesisUpgrade, "GenesisUpgrade"); - await checkIdenticalBytecode(upgradeHyperchains, "UpgradeHyperchains"); + await checkIdenticalBytecode(upgradeZKChains, "UpgradeZKChains"); await checkIdenticalBytecode(executorFacet, "ExecutorFacet"); await checkIdenticalBytecode(gettersFacet, "GettersFacet"); await checkIdenticalBytecode(adminFacet, "AdminFacet"); @@ -487,13 +497,13 @@ async function main() { await checkValidatorTimelock(); await checkBridgehub(); - await checkL1SharedBridgeImpl(); + await checkL1AssetRouterImpl(); await checkSharedBridge(); await checkLegacyBridge(); - await checkSTMImpl(); - await checkSTM(); + await checkCTMImpl(); + await checkCTM(); }); await program.parseAsync(process.argv); diff --git a/l1-contracts/scripts/utils.ts b/l1-contracts/scripts/utils.ts index 5ae1bceac..b1573f39f 100644 --- a/l1-contracts/scripts/utils.ts +++ b/l1-contracts/scripts/utils.ts @@ -5,6 +5,7 @@ import * as chalk from "chalk"; import { ethers } from "ethers"; import * as fs from "fs"; import * as path from "path"; +import { isCurrentNetworkLocal } from "../src.ts/utils"; const warning = chalk.bold.yellow; export const L1_TO_L2_ALIAS_OFFSET = "0x1111000000000000000000000000000000001111"; @@ -50,7 +51,7 @@ export function web3Provider() { } // Short polling interval for local network - if (network === "localhost" || network === "hardhat") { + if (isCurrentNetworkLocal()) { provider.pollingInterval = 100; } diff --git a/l1-contracts/scripts/verify.ts b/l1-contracts/scripts/verify.ts index e1726a5d6..25255bad7 100644 --- a/l1-contracts/scripts/verify.ts +++ b/l1-contracts/scripts/verify.ts @@ -1,7 +1,13 @@ // hardhat import should be the first import in the file import * as hardhat from "hardhat"; import { deployedAddressesFromEnv } from "../src.ts/deploy-utils"; -import { ethTestConfig, getNumberFromEnv, getHashFromEnv, getAddressFromEnv } from "../src.ts/utils"; +import { + getNumberFromEnv, + 
getHashFromEnv, + getAddressFromEnv, + isCurrentNetworkLocal, + ethTestConfig, +} from "../src.ts/utils"; import { Interface } from "ethers/lib/utils"; import { Deployer } from "../src.ts/deploy"; @@ -34,7 +40,7 @@ function verifyPromise( // Note: running all verifications in parallel might be too much for etherscan, comment out some of them if needed async function main() { - if (process.env.CHAIN_ETH_NETWORK == "localhost") { + if (isCurrentNetworkLocal()) { console.log("Skip contract verification on localhost"); return; } @@ -82,7 +88,7 @@ async function main() { const promise3 = verifyPromise(process.env.CONTRACTS_DEFAULT_UPGRADE_ADDR); promises.push(promise3); - const promise4 = verifyPromise(process.env.CONTRACTS_HYPERCHAIN_UPGRADE_ADDR); + const promise4 = verifyPromise(process.env.CONTRACTS_ZK_CHAIN_UPGRADE_ADDR); promises.push(promise4); const promise5 = verifyPromise(addresses.TransparentProxyAdmin); @@ -102,7 +108,7 @@ async function main() { ]); promises.push(promise7); - // stm + // ctm // Contracts without constructor parameters for (const address of [ @@ -121,18 +127,18 @@ async function main() { const promise8 = verifyPromise(addresses.StateTransition.StateTransitionImplementation, [ addresses.Bridgehub.BridgehubProxy, - getNumberFromEnv("CONTRACTS_MAX_NUMBER_OF_HYPERCHAINS"), + getNumberFromEnv("CONTRACTS_MAX_NUMBER_OF_ZK_CHAINS"), ]); promises.push(promise8); - const stateTransitionManager = new Interface(hardhat.artifacts.readArtifactSync("StateTransitionManager").abi); + const chainTypeManager = new Interface(hardhat.artifacts.readArtifactSync("ChainTypeManager").abi); const genesisBatchHash = getHashFromEnv("CONTRACTS_GENESIS_ROOT"); // TODO: confusing name const genesisRollupLeafIndex = getNumberFromEnv("CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX"); const genesisBatchCommitment = getHashFromEnv("CONTRACTS_GENESIS_BATCH_COMMITMENT"); - const diamondCut = await deployer.initialZkSyncHyperchainDiamondCut([]); + const diamondCut = await 
deployer.initialZkSyncZKChainDiamondCut([]); const protocolVersion = packSemver(...unpackStringSemVer(process.env.CONTRACTS_GENESIS_PROTOCOL_SEMANTIC_VERSION)); - const initCalldata2 = stateTransitionManager.encodeFunctionData("initialize", [ + const initCalldata2 = chainTypeManager.encodeFunctionData("initialize", [ { owner: addresses.Governance, validatorTimelock: addresses.ValidatorTimeLock, @@ -174,7 +180,7 @@ async function main() { eraDiamondProxy, ]); promises.push(promise12); - const initCalldata4 = new Interface(hardhat.artifacts.readArtifactSync("L1SharedBridge").abi).encodeFunctionData( + const initCalldata4 = new Interface(hardhat.artifacts.readArtifactSync("L1AssetRouter").abi).encodeFunctionData( "initialize", [deployWalletAddress] ); diff --git a/l1-contracts/src.ts/deploy-process.ts b/l1-contracts/src.ts/deploy-process.ts index 28629fc11..4415c8109 100644 --- a/l1-contracts/src.ts/deploy-process.ts +++ b/l1-contracts/src.ts/deploy-process.ts @@ -12,7 +12,13 @@ import type { FacetCut } from "./diamondCut"; import type { Deployer } from "./deploy"; import { getTokens } from "./deploy-token"; -import { ADDRESS_ONE } from "../src.ts/utils"; +import { + ADDRESS_ONE, + L2_BRIDGEHUB_ADDRESS, + L2_MESSAGE_ROOT_ADDRESS, + isCurrentNetworkLocal, + encodeNTVAssetId, +} from "../src.ts/utils"; export const L2_BOOTLOADER_BYTECODE_HASH = "0x1000100000000000000000000000000000000000000000000000000000000000"; export const L2_DEFAULT_ACCOUNT_BYTECODE_HASH = "0x1001000000000000000000000000000000000000000000000000000000000000"; @@ -25,16 +31,19 @@ export async function initialBridgehubDeployment( create2Salt?: string, nonce?: number ) { - nonce = nonce || (await deployer.deployWallet.getTransactionCount()); create2Salt = create2Salt || ethers.utils.hexlify(ethers.utils.randomBytes(32)); // Create2 factory already deployed on the public networks, only deploy it on local node - if (process.env.CHAIN_ETH_NETWORK === "localhost" || process.env.CHAIN_ETH_NETWORK === 
"hardhat") { - await deployer.deployCreate2Factory({ gasPrice, nonce }); - nonce++; + if (isCurrentNetworkLocal()) { + if (!deployer.isZkMode()) { + await deployer.deployCreate2Factory({ gasPrice, nonce }); + nonce = nonce || nonce == 0 ? ++nonce : nonce; + } else { + await deployer.updateCreate2FactoryZkMode(); + } await deployer.deployMulticall3(create2Salt, { gasPrice, nonce }); - nonce++; + nonce = nonce || nonce == 0 ? ++nonce : nonce; } if (onlyVerifier) { @@ -44,36 +53,52 @@ export async function initialBridgehubDeployment( await deployer.deployDefaultUpgrade(create2Salt, { gasPrice, - nonce, }); - nonce++; + nonce = nonce ? ++nonce : nonce; await deployer.deployGenesisUpgrade(create2Salt, { gasPrice, - nonce, }); - nonce++; - - await deployer.deployValidatorTimelock(create2Salt, { gasPrice, nonce }); - nonce++; - - await deployer.deployGovernance(create2Salt, { gasPrice, nonce }); - nonce++; + nonce = nonce ? ++nonce : nonce; + + await deployer.deployDAValidators(create2Salt, { gasPrice }); + // Governance will be L1 governance, but we want to deploy it here for the init process. 
+ await deployer.deployGovernance(create2Salt, { gasPrice }); + await deployer.deployChainAdmin(create2Salt, { gasPrice }); + await deployer.deployValidatorTimelock(create2Salt, { gasPrice }); + + if (!deployer.isZkMode()) { + // proxy admin is already deployed when SL's L2SharedBridge is registered + await deployer.deployTransparentProxyAdmin(create2Salt, { gasPrice }); + await deployer.deployBridgehubContract(create2Salt, gasPrice); + } else { + deployer.addresses.Bridgehub.BridgehubProxy = L2_BRIDGEHUB_ADDRESS; + deployer.addresses.Bridgehub.MessageRootProxy = L2_MESSAGE_ROOT_ADDRESS; + + console.log(`CONTRACTS_BRIDGEHUB_IMPL_ADDR=${L2_BRIDGEHUB_ADDRESS}`); + console.log(`CONTRACTS_BRIDGEHUB_PROXY_ADDR=${L2_BRIDGEHUB_ADDRESS}`); + console.log(`CONTRACTS_MESSAGE_ROOT_IMPL_ADDR=${L2_MESSAGE_ROOT_ADDRESS}`); + console.log(`CONTRACTS_MESSAGE_ROOT_PROXY_ADDR=${L2_MESSAGE_ROOT_ADDRESS}`); + } - await deployer.deployChainAdmin(create2Salt, { gasPrice, nonce }); - await deployer.deployTransparentProxyAdmin(create2Salt, { gasPrice }); - await deployer.deployBridgehubContract(create2Salt, gasPrice); - await deployer.deployBlobVersionedHashRetriever(create2Salt, { gasPrice }); - await deployer.deployStateTransitionManagerContract(create2Salt, extraFacets, gasPrice); - await deployer.setStateTransitionManagerInValidatorTimelock({ gasPrice }); + // L2 Asset Router Bridge already deployed + if (!deployer.isZkMode()) { + await deployer.deploySharedBridgeContracts(create2Salt, gasPrice); + await deployer.deployERC20BridgeImplementation(create2Salt, { gasPrice }); + await deployer.deployERC20BridgeProxy(create2Salt, { gasPrice }); + await deployer.setParametersSharedBridge(); + } - await deployer.deploySharedBridgeContracts(create2Salt, gasPrice); - await deployer.deployERC20BridgeImplementation(create2Salt, { gasPrice }); - await deployer.deployERC20BridgeProxy(create2Salt, { gasPrice }); - await deployer.setParametersSharedBridge(); + if (deployer.isZkMode()) { + await 
deployer.updateBlobVersionedHashRetrieverZkMode(); + } else { + await deployer.deployBlobVersionedHashRetriever(create2Salt, { gasPrice }); + } + await deployer.deployChainTypeManagerContract(create2Salt, extraFacets, gasPrice); + await deployer.setChainTypeManagerInValidatorTimelock({ gasPrice }); } -export async function registerHyperchain( +export async function registerZKChain( deployer: Deployer, validiumMode: boolean, extraFacets: FacetCut[], @@ -88,16 +113,22 @@ export async function registerHyperchain( ? testnetTokens.find((token: { symbol: string }) => token.symbol == baseTokenName).address : ADDRESS_ONE; - if (!(await deployer.bridgehubContract(deployer.deployWallet).tokenIsRegistered(baseTokenAddress))) { - await deployer.registerToken(baseTokenAddress, useGovernance); + const baseTokenAssetId = encodeNTVAssetId(deployer.l1ChainId, ethers.utils.hexZeroPad(baseTokenAddress, 32)); + if (!(await deployer.bridgehubContract(deployer.deployWallet).assetIdIsRegistered(baseTokenAssetId))) { + await deployer.registerTokenBridgehub(baseTokenAddress, useGovernance); + } + if (baseTokenAddress !== ADDRESS_ONE) { + await deployer.registerTokenInNativeTokenVault(baseTokenAddress); } - await deployer.registerHyperchain( - baseTokenAddress, + await deployer.registerZKChain( + encodeNTVAssetId(deployer.l1ChainId, ethers.utils.hexZeroPad(baseTokenAddress, 32)), validiumMode, extraFacets, gasPrice, + false, null, chainId, - useGovernance + useGovernance, + true ); } diff --git a/l1-contracts/src.ts/deploy-test-process.ts b/l1-contracts/src.ts/deploy-test-process.ts index b8af27b34..07b3fcfb9 100644 --- a/l1-contracts/src.ts/deploy-test-process.ts +++ b/l1-contracts/src.ts/deploy-test-process.ts @@ -7,7 +7,6 @@ import * as ethers from "ethers"; import type { BigNumberish, Wallet } from "ethers"; import { Interface } from "ethers/lib/utils"; import * as zkethers from "zksync-ethers"; -import { ETH_ADDRESS_IN_CONTRACTS } from "zksync-ethers/build/utils"; import * as fs from 
"fs"; import type { FacetCut } from "./diamondCut"; @@ -16,7 +15,7 @@ import { L2_BOOTLOADER_BYTECODE_HASH, L2_DEFAULT_ACCOUNT_BYTECODE_HASH, initialBridgehubDeployment, - registerHyperchain, + registerZKChain, } from "./deploy-process"; import { deployTokens, getTokens } from "./deploy-token"; @@ -28,6 +27,9 @@ import { PubdataPricingMode, ADDRESS_ONE, EMPTY_STRING_KECCAK, + isCurrentNetworkLocal, + ETH_ADDRESS_IN_CONTRACTS, + encodeNTVAssetId, } from "./utils"; import { diamondCut, getCurrentFacetCutsForAdd, facetCut, Action } from "./diamondCut"; import { CONTRACTS_GENESIS_PROTOCOL_VERSION } from "../test/unit_tests/utils"; @@ -49,12 +51,14 @@ export async function loadDefaultEnvVarsForTests(deployWallet: Wallet) { // process.env.CONTRACTS_SHARED_BRIDGE_UPGRADE_STORAGE_SWITCH = "1"; process.env.ETH_CLIENT_CHAIN_ID = (await deployWallet.getChainId()).toString(); process.env.CONTRACTS_ERA_CHAIN_ID = "270"; + process.env.CONTRACTS_L1_CHAIN_ID = "31337"; process.env.CONTRACTS_ERA_DIAMOND_PROXY_ADDR = ADDRESS_ONE; // CONTRACTS_ERA_DIAMOND_PROXY_ADDR; process.env.CONTRACTS_L2_SHARED_BRIDGE_ADDR = ADDRESS_ONE; process.env.CONTRACTS_L2_SHARED_BRIDGE_IMPL_ADDR = ADDRESS_ONE; process.env.CONTRACTS_L2_ERC20_BRIDGE_ADDR = ADDRESS_ONE; process.env.CONTRACTS_BRIDGEHUB_PROXY_ADDR = ADDRESS_ONE; + process.env.CONTRACTS_L2_DA_VALIDATOR_ADDR = ADDRESS_ONE; } export async function defaultDeployerForTests(deployWallet: Wallet, ownerAddress: string): Promise { @@ -65,6 +69,7 @@ export async function defaultDeployerForTests(deployWallet: Wallet, ownerAddress addresses: addressConfig, bootloaderBytecodeHash: L2_BOOTLOADER_BYTECODE_HASH, defaultAccountBytecodeHash: L2_DEFAULT_ACCOUNT_BYTECODE_HASH, + l1ChainId: process.env.CONTRACTS_L1_CHAIN_ID, }); } @@ -76,6 +81,7 @@ export async function defaultEraDeployerForTests(deployWallet: Wallet, ownerAddr addresses: addressConfig, bootloaderBytecodeHash: L2_BOOTLOADER_BYTECODE_HASH, defaultAccountBytecodeHash: 
L2_DEFAULT_ACCOUNT_BYTECODE_HASH, + l1ChainId: process.env.CONTRACTS_L1_CHAIN_ID, }); const l2_rpc_addr = "http://localhost:3050"; const web3Provider = new zkethers.Provider(l2_rpc_addr); @@ -98,17 +104,44 @@ export async function initialTestnetDeploymentProcess( deployer.chainId = 9; const testnetTokens = getTokens(); - const result = await deployTokens(testnetTokens, deployer.deployWallet, null, false, deployer.verbose); + const result = await deployTokens(testnetTokens, deployer.deployWallet, null, true, deployer.verbose); + fs.writeFileSync(testnetTokenPath, JSON.stringify(result, null, 2)); // deploy the verifier first await initialBridgehubDeployment(deployer, extraFacets, gasPrice, true); await initialBridgehubDeployment(deployer, extraFacets, gasPrice, false); - await registerHyperchain(deployer, false, extraFacets, gasPrice, baseTokenName); + await registerZKChainWithBridgeRegistration(deployer, false, extraFacets, gasPrice, baseTokenName); + await registerTestDAValidators(deployer); + return deployer; } -// This is used to deploy the diamond and bridge such that they can be upgraded using UpgradeHyperchain.sol +export async function registerZKChainWithBridgeRegistration( + deployer: Deployer, + onlyVerifier: boolean, + extraFacets: FacetCut[], + gasPrice: BigNumberish, + baseTokenName?: string, + chainId?: string +) { + chainId = chainId ?? deployer.chainId.toString(); + await registerZKChain(deployer, onlyVerifier, extraFacets, gasPrice, baseTokenName, chainId, true); + await registerTestDAValidators(deployer); +} + +async function registerTestDAValidators(deployer: Deployer) { + const contract = await deployer.stateTransitionContract(deployer.deployWallet); + // The L2 DA validator must not be zero, but it can be any other value. It is not relevant for the tests. 
+ await ( + await contract.setDAValidatorPair( + deployer.addresses.RollupL1DAValidator, + process.env.CONTRACTS_L2_DA_VALIDATOR_ADDR + ) + ).wait(); +} + +// This is used to deploy the diamond and bridge such that they can be upgraded using UpgradeZKChain.sol // This should be deleted after the migration export async function initialPreUpgradeContractsDeployment( deployWallet: Wallet, @@ -128,7 +161,7 @@ export async function initialPreUpgradeContractsDeployment( const create2Salt = ethers.utils.hexlify(ethers.utils.randomBytes(32)); // Create2 factory already deployed on the public networks, only deploy it on local node - if (process.env.CHAIN_ETH_NETWORK === "localhost" || process.env.CHAIN_ETH_NETWORK === "hardhat") { + if (isCurrentNetworkLocal()) { await deployer.deployCreate2Factory({ gasPrice, nonce }); nonce++; @@ -154,8 +187,8 @@ export async function initialPreUpgradeContractsDeployment( // note we should also deploy the old ERC20Bridge here, but we can do that later. // // for Era we first deploy the DiamondProxy manually, set the vars manually, - // // and register it in the system via STM.registerAlreadyDeployedStateTransition and bridgehub.createNewChain(ERA_CHAIN_ID, ..) - // // note we just deploy the STM to get the storedBatchZero + // // and register it in the system via CTM.registerAlreadyDeployedStateTransition and bridgehub.createNewChain(ERA_CHAIN_ID, ..) 
+ // // note we just deploy the CTM to get the storedBatchZero await deployer.deployDiamondProxy(extraFacets, {}); // we have to know the address of the diamond proxy in the mailbox so we separate the deployment @@ -165,7 +198,7 @@ export async function initialPreUpgradeContractsDeployment( ); await deployer.deployStateTransitionDiamondFacets(create2Salt); - await diamondAdminFacet.executeUpgradeNoOverlap(await deployer.upgradeZkSyncHyperchainDiamondCut()); + await diamondAdminFacet.executeUpgradeNoOverlap(await deployer.upgradeZKChainDiamondCut()); return deployer; } @@ -201,15 +234,9 @@ export async function initialEraTestnetDeploymentProcess( "DummyAdminFacetNoOverlap", deployer.addresses.StateTransition.DiamondProxy ); - await diamondAdminFacet.executeUpgradeNoOverlap(await deployer.upgradeZkSyncHyperchainDiamondCut()); - - const stateTransitionManager = deployer.stateTransitionManagerContract(deployer.deployWallet); - const registerData = stateTransitionManager.interface.encodeFunctionData("registerAlreadyDeployedHyperchain", [ - deployer.chainId, - deployer.addresses.StateTransition.DiamondProxy, - ]); - await deployer.executeUpgrade(deployer.addresses.StateTransition.StateTransitionProxy, 0, registerData); - await registerHyperchain(deployer, false, extraFacets, gasPrice, baseTokenName, deployer.chainId.toString()); + await diamondAdminFacet.executeUpgradeNoOverlap(await deployer.upgradeZKChainDiamondCut()); + + await registerZKChain(deployer, false, extraFacets, gasPrice, baseTokenName, deployer.chainId.toString(), true); return deployer; } @@ -252,7 +279,7 @@ export class EraDeployer extends Deployer { await tx.wait(); } - public async upgradeZkSyncHyperchainDiamondCut(extraFacets?: FacetCut[]) { + public async upgradeZKChainDiamondCut(extraFacets?: FacetCut[]) { let facetCuts: FacetCut[] = Object.values( await getCurrentFacetCutsForAdd( this.addresses.StateTransition.AdminFacet, @@ -304,11 +331,14 @@ export class EraDeployer extends Deployer { { chainId: 
this.chainId, // era chain Id bridgehub: this.addresses.Bridgehub.BridgehubProxy, - stateTransitionManager: this.addresses.StateTransition.StateTransitionProxy, + chainTypeManager: this.addresses.StateTransition.StateTransitionProxy, protocolVersion: CONTRACTS_GENESIS_PROTOCOL_VERSION, admin: this.ownerAddress, validatorTimelock: ADDRESS_ONE, - baseToken: ETH_ADDRESS_IN_CONTRACTS, + baseTokenAssetId: encodeNTVAssetId( + parseInt(process.env.CONTRACTS_L1_CHAIN_ID), + ethers.utils.hexZeroPad(ETH_ADDRESS_IN_CONTRACTS, 32) + ), baseTokenBridge: this.addresses.Bridges.SharedBridgeProxy, storedBatchZero, verifier: this.addresses.StateTransition.Verifier, diff --git a/l1-contracts/src.ts/deploy-token.ts b/l1-contracts/src.ts/deploy-token.ts index 4574db7dd..ea22d7029 100644 --- a/l1-contracts/src.ts/deploy-token.ts +++ b/l1-contracts/src.ts/deploy-token.ts @@ -5,6 +5,10 @@ import type { Contract } from "ethers"; import { parseEther } from "ethers/lib/utils"; import * as fs from "fs"; +import { isZKMode } from "./utils"; + +import { deployContractWithArgs as deployContractWithArgsEVM } from "./deploy-utils"; +import { deployContractWithArgs as deployContractWithArgsZK } from "./deploy-utils-zk"; const DEFAULT_ERC20 = "TestnetERC20Token"; @@ -25,10 +29,17 @@ export async function deployContracts(tokens: TokenDescription[], wallet: Wallet for (const token of tokens) { token.implementation = token.implementation || DEFAULT_ERC20; - const tokenFactory = await hardhat.ethers.getContractFactory(token.implementation, wallet); + const args = token.implementation !== "WETH9" ? 
[token.name, token.symbol, token.decimals] : []; - token.contract = await tokenFactory.deploy(...args, { gasLimit: 5000000, nonce: nonce++ }); + if (isZKMode()) { + token.contract = await deployContractWithArgsZK(wallet, token.implementation, args, { nonce: nonce++ }); + } else { + token.contract = await deployContractWithArgsEVM(wallet, token.implementation, args, { + gasLimit: 5000000, + nonce: nonce++, + }); + } } await Promise.all(tokens.map(async (token) => token.contract.deployTransaction.wait())); @@ -55,16 +66,17 @@ export async function mintTokens( tokens: TokenDescription[], wallet: Wallet, nonce: number, - mnemonic: string + mnemonics: string[] ): Promise { - const targetAddresses = [wallet.address, ...getTestAddresses(mnemonic)]; + const addressArray = mnemonics.map(getTestAddresses).flat(); + const targetAddresses = [wallet.address, ...addressArray]; const results = []; const promises = []; for (const token of tokens) { if (token.implementation !== "WETH9") { for (const address of targetAddresses) { - const tx = await token.contract.mint(address, parseEther("3000000000"), { nonce: nonce++ }); + const tx = await token.contract.mint(address, parseEther("300000000000000000000"), { nonce: nonce++ }); promises.push(tx.wait()); } } @@ -113,13 +125,15 @@ export async function deployTokens( } if (token.symbol !== "WETH" && mintTokens) { - await erc20.mint(wallet.address, parseEther("3000000000")); + await erc20.mint(wallet.address, parseEther("3000000000000")); } if (mintTokens) { for (let i = 0; i < 10; ++i) { - const testWalletAddress = Wallet.fromMnemonic(mnemonic as string, "m/44'/60'/0'/0/" + i).address; + const testWalletAddress = mnemonic + ? 
Wallet.fromMnemonic(mnemonic as string, "m/44'/60'/0'/0/" + i).address + : wallet.address; if (token.symbol !== "WETH") { - await erc20.mint(testWalletAddress, parseEther("3000000000")); + await erc20.mint(testWalletAddress, parseEther("3000000000000")); } } } diff --git a/l1-contracts/src.ts/deploy-utils-zk.ts b/l1-contracts/src.ts/deploy-utils-zk.ts new file mode 100644 index 000000000..de7287f90 --- /dev/null +++ b/l1-contracts/src.ts/deploy-utils-zk.ts @@ -0,0 +1,176 @@ +import * as hardhat from "hardhat"; +import "@nomiclabs/hardhat-ethers"; +import { Deployer as ZkDeployer } from "@matterlabs/hardhat-zksync-deploy"; +// import "@matterlabs/hardhat-zksync-ethers"; +import { ethers } from "ethers"; +import * as path from "path"; +import { IL2ContractDeployerFactory } from "../typechain/IL2ContractDeployerFactory"; +import type { Wallet as ZkWallet } from "zksync-ethers"; +import { utils as zkUtils, ContractFactory } from "zksync-ethers"; +// import { encode } from "querystring"; +// import { web3Provider, web3Url } from "../scripts/utils"; +import { ethersWalletToZkWallet, readBytecode, readContract, readInterface } from "./utils"; + +export const BUILT_IN_ZKSYNC_CREATE2_FACTORY = "0x0000000000000000000000000000000000010000"; + +const contractsHome = process.env.ZKSYNC_HOME ? 
path.join(process.env.ZKSYNC_HOME as string, "contracts/") : "../"; +const contractArtifactsPath = path.join(contractsHome, "l1-contracts/artifacts-zk/"); +const openzeppelinBeaconProxyArtifactsPath = path.join( + contractArtifactsPath, + "@openzeppelin/contracts-v4/proxy/beacon" +); +const L2_SHARED_BRIDGE_PATH = contractArtifactsPath + "contracts/bridge"; +export const L2_STANDARD_ERC20_PROXY_FACTORY = readContract(openzeppelinBeaconProxyArtifactsPath, "UpgradeableBeacon"); +export const L2_STANDARD_ERC20_IMPLEMENTATION = readContract(L2_SHARED_BRIDGE_PATH, "BridgedStandardERC20"); +export const L2_STANDARD_TOKEN_PROXY = readContract(openzeppelinBeaconProxyArtifactsPath, "BeaconProxy"); + +export const L2_SHARED_BRIDGE_IMPLEMENTATION = readContract(L2_SHARED_BRIDGE_PATH, "L2SharedBridgeLegacy"); +export const L2_SHARED_BRIDGE_PROXY = readContract( + contractArtifactsPath + "@openzeppelin/contracts-v4/proxy/transparent", + "TransparentUpgradeableProxy" +); + +export async function deployViaCreate2( + deployWallet: ZkWallet, + contractName: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + args: any[], + create2Salt: string, + ethTxOptions: ethers.providers.TransactionRequest, + verbose: boolean = true +): Promise<[string, string]> { + return await deployBytecodeViaCreate2(deployWallet, contractName, create2Salt, ethTxOptions, args, verbose); +} + +export async function deployBytecodeViaCreate2( + deployWallet: ZkWallet, + contractName: string, + create2Salt: string, + ethTxOptions: ethers.providers.TransactionRequest, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + args: any[], + verbose: boolean = true +): Promise<[string, string]> { + // [address, txHash] + + const log = (msg: string) => { + if (verbose) { + console.log(msg); + } + }; + log(`Deploying ${contractName}`); + + // @ts-ignore + const zkDeployer = new ZkDeployer(hardhat, deployWallet); + const artifact = await zkDeployer.loadArtifact(contractName); + const 
factoryDeps = await zkDeployer.extractFactoryDeps(artifact); + + const bytecodeHash = zkUtils.hashBytecode(artifact.bytecode); + const iface = new ethers.utils.Interface(artifact.abi); + const encodedArgs = iface.encodeDeploy(args); + + // The CREATE2Factory has the same interface as the contract deployer + const create2Factory = IL2ContractDeployerFactory.connect(BUILT_IN_ZKSYNC_CREATE2_FACTORY, deployWallet); + const expectedAddress = zkUtils.create2Address(create2Factory.address, bytecodeHash, create2Salt, encodedArgs); + + const deployedBytecodeBefore = await deployWallet.provider.getCode(expectedAddress); + if (ethers.utils.hexDataLength(deployedBytecodeBefore) > 0) { + log(`Contract ${contractName} already deployed`); + return [expectedAddress, ethers.constants.HashZero]; + } + + const encodedTx = create2Factory.interface.encodeFunctionData("create2", [create2Salt, bytecodeHash, encodedArgs]); + + const tx = await deployWallet.sendTransaction({ + data: encodedTx, + to: create2Factory.address, + ...ethTxOptions, + customData: { + factoryDeps: [artifact.bytecode, ...factoryDeps], + }, + }); + const receipt = await tx.wait(); + + const gasUsed = receipt.gasUsed; + log(`${contractName} deployed, gasUsed: ${gasUsed.toString()}`); + + const deployedBytecodeAfter = await deployWallet.provider.getCode(expectedAddress); + if (ethers.utils.hexDataLength(deployedBytecodeAfter) == 0) { + throw new Error(`Failed to deploy ${contractName} bytecode via create2 factory`); + } + + return [expectedAddress, tx.hash]; +} + +export async function deployBytecodeViaCreate2OnPath( + deployWallet: ZkWallet, + contractName: string, + contractPath: string, + create2Salt: string, + ethTxOptions: ethers.providers.TransactionRequest, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + args: any[], + factoryDeps: string[] = [], + verbose: boolean = true +): Promise<[string, string]> { + // [address, txHash] + + const log = (msg: string) => { + if (verbose) { + 
console.log(msg); + } + }; + + // @ts-ignore + // const zkDeployer = new ZkDeployer(hardhat, deployWallet); + const bytecode = readBytecode(contractPath, contractName); + + const bytecodeHash = zkUtils.hashBytecode(bytecode); + const iface = readInterface(contractPath, contractName); + const encodedArgs = iface.encodeDeploy(args); + + // The CREATE2Factory has the same interface as the contract deployer + const create2Factory = IL2ContractDeployerFactory.connect(BUILT_IN_ZKSYNC_CREATE2_FACTORY, deployWallet); + const expectedAddress = zkUtils.create2Address(create2Factory.address, bytecodeHash, create2Salt, encodedArgs); + + const deployedBytecodeBefore = await deployWallet.provider.getCode(expectedAddress); + if (ethers.utils.hexDataLength(deployedBytecodeBefore) > 0) { + log(`Contract ${contractName} already deployed`); + return [expectedAddress, ethers.constants.HashZero]; + } + + const encodedTx = create2Factory.interface.encodeFunctionData("create2", [create2Salt, bytecodeHash, encodedArgs]); + + const tx = await deployWallet.sendTransaction({ + data: encodedTx, + to: create2Factory.address, + ...ethTxOptions, + customData: { + factoryDeps: [bytecode, ...factoryDeps], + }, + }); + const receipt = await tx.wait(); + + const gasUsed = receipt.gasUsed; + log(`${contractName} deployed, gasUsed: ${gasUsed.toString()}`); + + const deployedBytecodeAfter = await deployWallet.provider.getCode(expectedAddress); + if (ethers.utils.hexDataLength(deployedBytecodeAfter) == 0) { + throw new Error(`Failed to deploy ${contractName} bytecode via create2 factory`); + } + + return [expectedAddress, tx.hash]; +} +export async function deployContractWithArgs( + wallet: ethers.Wallet, + contractName: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + args: any[], + ethTxOptions: ethers.providers.TransactionRequest +) { + const artifact = await hardhat.artifacts.readArtifact(contractName); + const zkWallet = ethersWalletToZkWallet(wallet); + const factory = 
new ContractFactory(artifact.abi, artifact.bytecode, zkWallet); + + return await factory.deploy(...args, ethTxOptions); +} diff --git a/l1-contracts/src.ts/deploy-utils.ts b/l1-contracts/src.ts/deploy-utils.ts index 71b4d0c31..dcec2c180 100644 --- a/l1-contracts/src.ts/deploy-utils.ts +++ b/l1-contracts/src.ts/deploy-utils.ts @@ -1,9 +1,19 @@ import * as hardhat from "hardhat"; import "@nomiclabs/hardhat-ethers"; import { ethers } from "ethers"; +import { Interface } from "ethers/lib/utils"; import { SingletonFactoryFactory } from "../typechain"; -import { getAddressFromEnv } from "./utils"; +import { + encodeNTVAssetId, + getAddressFromEnv, + getNumberFromEnv, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + DEPLOYER_SYSTEM_CONTRACT_ADDRESS, + ADDRESS_ONE, +} from "./utils"; +import { IBridgehubFactory } from "../typechain/IBridgehubFactory"; +import { IERC20Factory } from "../typechain/IERC20Factory"; export async function deployViaCreate2( deployWallet: ethers.Wallet, @@ -15,15 +25,18 @@ export async function deployViaCreate2( create2FactoryAddress: string, verbose: boolean = true, // eslint-disable-next-line @typescript-eslint/no-explicit-any - libraries?: any + libraries?: any, + bytecode?: ethers.utils.BytesLike ): Promise<[string, string]> { // [address, txHash] - const contractFactory = await hardhat.ethers.getContractFactory(contractName, { - signer: deployWallet, - libraries, - }); - const bytecode = contractFactory.getDeployTransaction(...args, ethTxOptions).data; + if (!bytecode) { + const contractFactory = await hardhat.ethers.getContractFactory(contractName, { + signer: deployWallet, + libraries, + }); + bytecode = contractFactory.getDeployTransaction(...args, ethTxOptions).data; + } return await deployBytecodeViaCreate2( deployWallet, @@ -83,10 +96,111 @@ export async function deployBytecodeViaCreate2( return [expectedAddress, tx.hash]; } +export async function deployContractWithArgs( + wallet: ethers.Wallet, + contractName: string, + // 
eslint-disable-next-line + args: any[], + ethTxOptions: ethers.providers.TransactionRequest +) { + const factory = await hardhat.ethers.getContractFactory(contractName, wallet); + + return await factory.deploy(...args, ethTxOptions); +} + +export function hashL2Bytecode(bytecode: ethers.BytesLike): Uint8Array { + // For getting the consistent length we first convert the bytecode to UInt8Array + const bytecodeAsArray = ethers.utils.arrayify(bytecode); + + if (bytecodeAsArray.length % 32 != 0) { + throw new Error("The bytecode length in bytes must be divisible by 32"); + } + + const hashStr = ethers.utils.sha256(bytecodeAsArray); + const hash = ethers.utils.arrayify(hashStr); + + // Note that the length of the bytecode + // should be provided in 32-byte words. + const bytecodeLengthInWords = bytecodeAsArray.length / 32; + if (bytecodeLengthInWords % 2 == 0) { + throw new Error("Bytecode length in 32-byte words must be odd"); + } + const bytecodeLength = ethers.utils.arrayify(bytecodeAsArray.length / 32); + if (bytecodeLength.length > 2) { + throw new Error("Bytecode length must be less than 2^16 bytes"); + } + // The bytecode should always take the first 2 bytes of the bytecode hash, + // so we pad it from the left in case the length is smaller than 2 bytes. + const bytecodeLengthPadded = ethers.utils.zeroPad(bytecodeLength, 2); + + const codeHashVersion = new Uint8Array([1, 0]); + hash.set(codeHashVersion, 0); + hash.set(bytecodeLengthPadded, 2); + + return hash; +} + +export async function create2DeployFromL1( + chainId: ethers.BigNumberish, + wallet: ethers.Wallet, + bytecode: ethers.BytesLike, + constructor: ethers.BytesLike, + create2Salt: ethers.BytesLike, + l2GasLimit: ethers.BigNumberish, + gasPrice?: ethers.BigNumberish, + extraFactoryDeps?: ethers.BytesLike[], + bridgehubAddress?: string, + assetRouterAddress?: string +) { + bridgehubAddress = bridgehubAddress ?? 
deployedAddressesFromEnv().Bridgehub.BridgehubProxy; + const bridgehub = IBridgehubFactory.connect(bridgehubAddress, wallet); + + const deployerSystemContracts = new Interface(hardhat.artifacts.readArtifactSync("IContractDeployer").abi); + const bytecodeHash = hashL2Bytecode(bytecode); + const calldata = deployerSystemContracts.encodeFunctionData("create2", [create2Salt, bytecodeHash, constructor]); + gasPrice ??= await bridgehub.provider.getGasPrice(); + const expectedCost = await bridgehub.l2TransactionBaseCost( + chainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + const baseTokenAddress = await bridgehub.baseToken(chainId); + const baseTokenBridge = assetRouterAddress ?? deployedAddressesFromEnv().Bridges.SharedBridgeProxy; + const ethIsBaseToken = ADDRESS_ONE == baseTokenAddress; + + if (!ethIsBaseToken) { + const baseToken = IERC20Factory.connect(baseTokenAddress, wallet); + const tx = await baseToken.approve(baseTokenBridge, expectedCost); + await tx.wait(); + } + const factoryDeps = extraFactoryDeps ? [bytecode, ...extraFactoryDeps] : [bytecode]; + + return await bridgehub.requestL2TransactionDirect( + { + chainId, + l2Contract: DEPLOYER_SYSTEM_CONTRACT_ADDRESS, + mintValue: expectedCost, + l2Value: 0, + l2Calldata: calldata, + l2GasLimit, + l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + factoryDeps: factoryDeps, + refundRecipient: wallet.address, + }, + { value: ethIsBaseToken ? 
expectedCost : 0, gasPrice } + ); +} + export interface DeployedAddresses { Bridgehub: { BridgehubProxy: string; BridgehubImplementation: string; + CTMDeploymentTrackerImplementation: string; + CTMDeploymentTrackerProxy: string; + MessageRootImplementation: string; + MessageRootProxy: string; }; StateTransition: { StateTransitionProxy: string; @@ -103,27 +217,55 @@ export interface DeployedAddresses { DiamondProxy: string; }; Bridges: { + L1NullifierImplementation: string; + L1NullifierProxy: string; ERC20BridgeImplementation: string; ERC20BridgeProxy: string; SharedBridgeImplementation: string; SharedBridgeProxy: string; L2SharedBridgeProxy: string; L2SharedBridgeImplementation: string; + L2LegacySharedBridgeProxy: string; + L2LegacySharedBridgeImplementation: string; + L2NativeTokenVaultImplementation: string; + L2NativeTokenVaultProxy: string; + NativeTokenVaultImplementation: string; + NativeTokenVaultProxy: string; + BridgedStandardERC20Implementation: string; + BridgedTokenBeacon: string; }; + BaseTokenAssetId: string; BaseToken: string; TransparentProxyAdmin: string; + L2ProxyAdmin: string; Governance: string; ChainAdmin: string; BlobVersionedHashRetriever: string; ValidatorTimeLock: string; + RollupL1DAValidator: string; + ValidiumL1DAValidator: string; + RelayedSLDAValidator: string; Create2Factory: string; } export function deployedAddressesFromEnv(): DeployedAddresses { + let baseTokenAssetId = "0"; + try { + baseTokenAssetId = getAddressFromEnv("CONTRACTS_BASE_TOKEN_ASSET_ID"); + } catch (error) { + baseTokenAssetId = encodeNTVAssetId( + parseInt(getNumberFromEnv("ETH_CLIENT_CHAIN_ID")), + ethers.utils.hexZeroPad(getAddressFromEnv("CONTRACTS_BASE_TOKEN_ADDR"), 32) + ); + } return { Bridgehub: { BridgehubProxy: getAddressFromEnv("CONTRACTS_BRIDGEHUB_PROXY_ADDR"), BridgehubImplementation: getAddressFromEnv("CONTRACTS_BRIDGEHUB_IMPL_ADDR"), + CTMDeploymentTrackerImplementation: getAddressFromEnv("CONTRACTS_CTM_DEPLOYMENT_TRACKER_IMPL_ADDR"), + 
CTMDeploymentTrackerProxy: getAddressFromEnv("CONTRACTS_CTM_DEPLOYMENT_TRACKER_PROXY_ADDR"), + MessageRootImplementation: getAddressFromEnv("CONTRACTS_MESSAGE_ROOT_IMPL_ADDR"), + MessageRootProxy: getAddressFromEnv("CONTRACTS_MESSAGE_ROOT_PROXY_ADDR"), }, StateTransition: { StateTransitionProxy: getAddressFromEnv("CONTRACTS_STATE_TRANSITION_PROXY_ADDR"), @@ -140,15 +282,30 @@ export function deployedAddressesFromEnv(): DeployedAddresses { DiamondProxy: getAddressFromEnv("CONTRACTS_DIAMOND_PROXY_ADDR"), }, Bridges: { + L1NullifierImplementation: getAddressFromEnv("CONTRACTS_L1_NULLIFIER_IMPL_ADDR"), + L1NullifierProxy: getAddressFromEnv("CONTRACTS_L1_NULLIFIER_PROXY_ADDR"), ERC20BridgeImplementation: getAddressFromEnv("CONTRACTS_L1_ERC20_BRIDGE_IMPL_ADDR"), ERC20BridgeProxy: getAddressFromEnv("CONTRACTS_L1_ERC20_BRIDGE_PROXY_ADDR"), SharedBridgeImplementation: getAddressFromEnv("CONTRACTS_L1_SHARED_BRIDGE_IMPL_ADDR"), SharedBridgeProxy: getAddressFromEnv("CONTRACTS_L1_SHARED_BRIDGE_PROXY_ADDR"), + L2NativeTokenVaultImplementation: getAddressFromEnv("CONTRACTS_L2_NATIVE_TOKEN_VAULT_IMPL_ADDR"), + L2NativeTokenVaultProxy: getAddressFromEnv("CONTRACTS_L2_NATIVE_TOKEN_VAULT_PROXY_ADDR"), L2SharedBridgeImplementation: getAddressFromEnv("CONTRACTS_L2_SHARED_BRIDGE_IMPL_ADDR"), L2SharedBridgeProxy: getAddressFromEnv("CONTRACTS_L2_SHARED_BRIDGE_ADDR"), + L2LegacySharedBridgeProxy: getAddressFromEnv("CONTRACTS_L2_LEGACY_SHARED_BRIDGE_ADDR"), + L2LegacySharedBridgeImplementation: getAddressFromEnv("CONTRACTS_L2_LEGACY_SHARED_BRIDGE_IMPL_ADDR"), + NativeTokenVaultImplementation: getAddressFromEnv("CONTRACTS_L1_NATIVE_TOKEN_VAULT_IMPL_ADDR"), + NativeTokenVaultProxy: getAddressFromEnv("CONTRACTS_L1_NATIVE_TOKEN_VAULT_PROXY_ADDR"), + BridgedStandardERC20Implementation: getAddressFromEnv("CONTRACTS_L1_BRIDGED_STANDARD_ERC20_IMPL_ADDR"), + BridgedTokenBeacon: getAddressFromEnv("CONTRACTS_L1_BRIDGED_TOKEN_BEACON_ADDR"), }, + RollupL1DAValidator: 
getAddressFromEnv("CONTRACTS_L1_ROLLUP_DA_VALIDATOR"), + ValidiumL1DAValidator: getAddressFromEnv("CONTRACTS_L1_VALIDIUM_DA_VALIDATOR"), + RelayedSLDAValidator: getAddressFromEnv("CONTRACTS_L1_RELAYED_SL_DA_VALIDATOR"), BaseToken: getAddressFromEnv("CONTRACTS_BASE_TOKEN_ADDR"), + BaseTokenAssetId: baseTokenAssetId, TransparentProxyAdmin: getAddressFromEnv("CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR"), + L2ProxyAdmin: getAddressFromEnv("CONTRACTS_L2_PROXY_ADMIN_ADDR"), Create2Factory: getAddressFromEnv("CONTRACTS_CREATE2_FACTORY_ADDR"), BlobVersionedHashRetriever: getAddressFromEnv("CONTRACTS_BLOB_VERSIONED_HASH_RETRIEVER_ADDR"), ValidatorTimeLock: getAddressFromEnv("CONTRACTS_VALIDATOR_TIMELOCK_ADDR"), diff --git a/l1-contracts/src.ts/deploy.ts b/l1-contracts/src.ts/deploy.ts index 52ae1d086..0c8ccc2cc 100644 --- a/l1-contracts/src.ts/deploy.ts +++ b/l1-contracts/src.ts/deploy.ts @@ -1,16 +1,38 @@ import * as hardhat from "hardhat"; import "@nomiclabs/hardhat-ethers"; +// import "@matterlabs/hardhat-zksync-ethers"; -import type { BigNumberish, providers, Signer, Wallet } from "ethers"; +import type { BigNumberish, providers, Signer, Wallet, Contract } from "ethers"; import { ethers } from "ethers"; import { hexlify, Interface } from "ethers/lib/utils"; +import { Wallet as ZkWallet, ContractFactory as ZkContractFactory } from "zksync-ethers"; + import type { DeployedAddresses } from "./deploy-utils"; -import { deployedAddressesFromEnv, deployBytecodeViaCreate2, deployViaCreate2 } from "./deploy-utils"; +import { + deployedAddressesFromEnv, + deployBytecodeViaCreate2 as deployBytecodeViaCreate2EVM, + deployViaCreate2 as deployViaCreate2EVM, + create2DeployFromL1, +} from "./deploy-utils"; +import { + deployViaCreate2 as deployViaCreate2Zk, + BUILT_IN_ZKSYNC_CREATE2_FACTORY, + L2_STANDARD_ERC20_PROXY_FACTORY, + L2_STANDARD_ERC20_IMPLEMENTATION, + L2_STANDARD_TOKEN_PROXY, + L2_SHARED_BRIDGE_IMPLEMENTATION, + L2_SHARED_BRIDGE_PROXY, + // deployBytecodeViaCreate2OnPath, + // 
L2_SHARED_BRIDGE_PATH, +} from "./deploy-utils-zk"; import { packSemver, readBatchBootloaderBytecode, readSystemContractsBytecode, unpackStringSemVer, + SYSTEM_CONFIG, + // web3Provider, + // web3Url, } from "../scripts/utils"; import { getTokens } from "./deploy-token"; import { @@ -21,35 +43,55 @@ import { PubdataPricingMode, hashL2Bytecode, DIAMOND_CUT_DATA_ABI_STRING, + FIXED_FORCE_DEPLOYMENTS_DATA_ABI_STRING, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA, compileInitialCutHash, + readBytecode, + applyL1ToL2Alias, + BRIDGEHUB_CTM_ASSET_DATA_ABI_STRING, + encodeNTVAssetId, + computeL2Create2Address, + priorityTxMaxGasLimit, + isCurrentNetworkLocal, } from "./utils"; -import { IBridgehubFactory } from "../typechain/IBridgehubFactory"; +import type { ChainAdminCall } from "./utils"; import { IGovernanceFactory } from "../typechain/IGovernanceFactory"; -import { IStateTransitionManagerFactory } from "../typechain/IStateTransitionManagerFactory"; import { ITransparentUpgradeableProxyFactory } from "../typechain/ITransparentUpgradeableProxyFactory"; import { ProxyAdminFactory } from "../typechain/ProxyAdminFactory"; -import { IZkSyncHyperchainFactory } from "../typechain/IZkSyncHyperchainFactory"; -import { L1SharedBridgeFactory } from "../typechain/L1SharedBridgeFactory"; +import { IZKChainFactory } from "../typechain/IZKChainFactory"; +import { L1AssetRouterFactory } from "../typechain/L1AssetRouterFactory"; +import { L1NullifierDevFactory } from "../typechain/L1NullifierDevFactory"; import { SingletonFactoryFactory } from "../typechain/SingletonFactoryFactory"; import { ValidatorTimelockFactory } from "../typechain/ValidatorTimelockFactory"; + import type { FacetCut } from "./diamondCut"; import { getCurrentFacetCutsForAdd } from "./diamondCut"; -import { ChainAdminFactory, ERC20Factory } from "../typechain"; -import type { Contract, Overrides } from "@ethersproject/contracts"; +import { BridgehubFactory, ChainAdminFactory, ERC20Factory, ChainTypeManagerFactory } from 
"../typechain"; + +import { IL1AssetRouterFactory } from "../typechain/IL1AssetRouterFactory"; +import { IL1NativeTokenVaultFactory } from "../typechain/IL1NativeTokenVaultFactory"; +import { IL1NullifierFactory } from "../typechain/IL1NullifierFactory"; +import { ICTMDeploymentTrackerFactory } from "../typechain/ICTMDeploymentTrackerFactory"; +import { TestnetERC20TokenFactory } from "../typechain/TestnetERC20TokenFactory"; + +import { RollupL1DAValidatorFactory } from "../../da-contracts/typechain/RollupL1DAValidatorFactory"; let L2_BOOTLOADER_BYTECODE_HASH: string; let L2_DEFAULT_ACCOUNT_BYTECODE_HASH: string; export interface DeployerConfig { - deployWallet: Wallet; + deployWallet: Wallet | ZkWallet; addresses?: DeployedAddresses; ownerAddress?: string; verbose?: boolean; bootloaderBytecodeHash?: string; defaultAccountBytecodeHash?: string; + deployedLogPrefix?: string; + l1Deployer?: Deployer; + l1ChainId?: string; } export interface Operation { @@ -62,10 +104,16 @@ export type OperationOrString = Operation | string; export class Deployer { public addresses: DeployedAddresses; - public deployWallet: Wallet; + public deployWallet: Wallet | ZkWallet; public verbose: boolean; public chainId: number; + public l1ChainId: number; public ownerAddress: string; + public deployedLogPrefix: string; + + public isZkMode(): boolean { + return this.deployWallet instanceof ZkWallet; + } constructor(config: DeployerConfig) { this.deployWallet = config.deployWallet; @@ -79,9 +127,11 @@ export class Deployer { : hexlify(hashL2Bytecode(readSystemContractsBytecode("DefaultAccount"))); this.ownerAddress = config.ownerAddress != null ? config.ownerAddress : this.deployWallet.address; this.chainId = parseInt(process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID!); + this.l1ChainId = parseInt(config.l1ChainId || getNumberFromEnv("ETH_CLIENT_CHAIN_ID")); + this.deployedLogPrefix = config.deployedLogPrefix ?? 
"CONTRACTS"; } - public async initialZkSyncHyperchainDiamondCut(extraFacets?: FacetCut[]) { + public async initialZkSyncZKChainDiamondCut(extraFacets?: FacetCut[], compareDiamondCutHash: boolean = false) { let facetCuts: FacetCut[] = Object.values( await getCurrentFacetCutsForAdd( this.addresses.StateTransition.AdminFacet, @@ -99,7 +149,7 @@ export class Deployer { }; const priorityTxMaxGasLimit = getNumberFromEnv("CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT"); - return compileInitialCutHash( + const diamondCut = compileInitialCutHash( facetCuts, verifierParams, L2_BOOTLOADER_BYTECODE_HASH, @@ -110,6 +160,72 @@ export class Deployer { this.addresses.StateTransition.DiamondInit, false ); + + if (compareDiamondCutHash) { + const hash = ethers.utils.keccak256( + ethers.utils.defaultAbiCoder.encode([DIAMOND_CUT_DATA_ABI_STRING], [diamondCut]) + ); + + console.log(`Diamond cut hash: ${hash}`); + const ctm = ChainTypeManagerFactory.connect( + this.addresses.StateTransition.StateTransitionProxy, + this.deployWallet + ); + + const hashFromCTM = await ctm.initialCutHash(); + if (hash != hashFromCTM) { + throw new Error(`Has from CTM ${hashFromCTM} does not match the computed hash ${hash}`); + } + } + + return diamondCut; + } + + public async genesisForceDeploymentsData() { + let bridgehubZKBytecode = ethers.constants.HashZero; + let assetRouterZKBytecode = ethers.constants.HashZero; + let nativeTokenVaultZKBytecode = ethers.constants.HashZero; + let l2TokenProxyBytecodeHash = ethers.constants.HashZero; + let messageRootZKBytecode = ethers.constants.HashZero; + if (process.env.CHAIN_ETH_NETWORK != "hardhat") { + bridgehubZKBytecode = readBytecode("./artifacts-zk/contracts/bridgehub", "Bridgehub"); + assetRouterZKBytecode = readBytecode("./artifacts-zk/contracts/bridge/asset-router", "L2AssetRouter"); + nativeTokenVaultZKBytecode = readBytecode("./artifacts-zk/contracts/bridge/ntv", "L2NativeTokenVault"); + messageRootZKBytecode = readBytecode("./artifacts-zk/contracts/bridgehub", 
"MessageRoot"); + const l2TokenProxyBytecode = readBytecode( + "./artifacts-zk/@openzeppelin/contracts-v4/proxy/beacon", + "BeaconProxy" + ); + l2TokenProxyBytecodeHash = ethers.utils.hexlify(hashL2Bytecode(l2TokenProxyBytecode)); + } + const fixedForceDeploymentsData = { + l1ChainId: getNumberFromEnv("ETH_CLIENT_CHAIN_ID"), + eraChainId: getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"), + l1AssetRouter: this.addresses.Bridges.SharedBridgeProxy, + l2TokenProxyBytecodeHash: l2TokenProxyBytecodeHash, + aliasedL1Governance: applyL1ToL2Alias(this.addresses.Governance), + maxNumberOfZKChains: getNumberFromEnv("CONTRACTS_MAX_NUMBER_OF_ZK_CHAINS"), + bridgehubBytecodeHash: ethers.utils.hexlify(hashL2Bytecode(bridgehubZKBytecode)), + l2AssetRouterBytecodeHash: ethers.utils.hexlify(hashL2Bytecode(assetRouterZKBytecode)), + l2NtvBytecodeHash: ethers.utils.hexlify(hashL2Bytecode(nativeTokenVaultZKBytecode)), + messageRootBytecodeHash: ethers.utils.hexlify(hashL2Bytecode(messageRootZKBytecode)), + l2SharedBridgeLegacyImpl: ethers.constants.AddressZero, + l2BridgedStandardERC20Impl: ethers.constants.AddressZero, + l2BridgeProxyOwnerAddress: ethers.constants.AddressZero, + l2BridgedStandardERC20ProxyOwnerAddress: ethers.constants.AddressZero, + }; + + return ethers.utils.defaultAbiCoder.encode([FIXED_FORCE_DEPLOYMENTS_DATA_ABI_STRING], [fixedForceDeploymentsData]); + } + + public async updateCreate2FactoryZkMode() { + if (!this.isZkMode()) { + throw new Error("`updateCreate2FactoryZkMode` should be only called in Zk mode"); + } + + console.log("Create2Factory is built into zkSync and so won't be deployed separately"); + console.log(`CONTRACTS_CREATE2_FACTORY_ADDR=${BUILT_IN_ZKSYNC_CREATE2_FACTORY}`); + this.addresses.Create2Factory = BUILT_IN_ZKSYNC_CREATE2_FACTORY; } public async deployCreate2Factory(ethTxOptions?: ethers.providers.TransactionRequest) { @@ -117,6 +233,10 @@ export class Deployer { console.log("Deploying Create2 factory"); } + if (this.isZkMode()) { + throw new 
Error("Create2Factory is built into zkSync and should not be deployed separately"); + } + const contractFactory = await hardhat.ethers.getContractFactory("SingletonFactory", { signer: this.deployWallet, }); @@ -126,7 +246,7 @@ export class Deployer { if (this.verbose) { console.log(`CONTRACTS_CREATE2_FACTORY_ADDR=${create2Factory.address}`); - console.log(`Create2 factory deployed, gasUsed: ${rec.gasUsed.toString()}`); + console.log(`Create2 factory deployed, gasUsed: ${rec.gasUsed.toString()}, ${rec.transactionHash}`); } this.addresses.Create2Factory = create2Factory.address; @@ -139,9 +259,28 @@ export class Deployer { create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest, // eslint-disable-next-line @typescript-eslint/no-explicit-any - libraries?: any + libraries?: any, + bytecode?: ethers.utils.BytesLike ) { - const result = await deployViaCreate2( + if (this.isZkMode()) { + if (bytecode != null) { + return ADDRESS_ONE; + // note providing bytecode is only for da-contracts on L1, we can skip it here + } + const result = await deployViaCreate2Zk( + this.deployWallet as ZkWallet, + contractName, + args, + create2Salt, + ethTxOptions, + this.verbose + ); + return result[0]; + } + + // For L1 deployments we try to use constant gas limit + ethTxOptions.gasLimit ??= 10_000_000; + const result = await deployViaCreate2EVM( this.deployWallet, contractName, args, @@ -149,18 +288,35 @@ export class Deployer { ethTxOptions, this.addresses.Create2Factory, this.verbose, - libraries + libraries, + bytecode ); return result[0]; } + public async loadFromDAFolder(contractName: string) { + let factory; + if (contractName == "RollupL1DAValidator") { + factory = new RollupL1DAValidatorFactory(this.deployWallet); + } else { + throw new Error(`Unknown DA contract name ${contractName}`); + } + return factory.getDeployTransaction().data; + } + private async deployBytecodeViaCreate2( contractName: string, bytecode: ethers.BytesLike, create2Salt: string, ethTxOptions: 
ethers.providers.TransactionRequest ): Promise { - const result = await deployBytecodeViaCreate2( + if (this.isZkMode()) { + throw new Error("`deployBytecodeViaCreate2` not supported in zkMode"); + } + + ethTxOptions.gasLimit ??= 10_000_000; + + const result = await deployBytecodeViaCreate2EVM( this.deployWallet, contractName, bytecode, @@ -174,7 +330,6 @@ export class Deployer { } public async deployGovernance(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const contractAddress = await this.deployViaCreate2( "Governance", // TODO: load parameters from config @@ -192,9 +347,21 @@ export class Deployer { public async deployChainAdmin(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { ethTxOptions.gasLimit ??= 10_000_000; + // Firstly, we deploy the access control restriction for the chain admin + const accessControlRestriction = await this.deployViaCreate2( + "AccessControlRestriction", + [0, this.ownerAddress], + create2Salt, + ethTxOptions + ); + if (this.verbose) { + console.log(`CONTRACTS_ACCESS_CONTROL_RESTRICTION_ADDR=${accessControlRestriction}`); + } + + // Then we deploy the ChainAdmin contract itself const contractAddress = await this.deployViaCreate2( "ChainAdmin", - [this.ownerAddress, ethers.constants.AddressZero], + [[accessControlRestriction]], create2Salt, ethTxOptions ); @@ -204,35 +371,44 @@ export class Deployer { this.addresses.ChainAdmin = contractAddress; } - public async deployBridgehubImplementation(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; - const contractAddress = await this.deployViaCreate2("Bridgehub", [], create2Salt, ethTxOptions); - - if (this.verbose) { - console.log(`CONTRACTS_BRIDGEHUB_IMPL_ADDR=${contractAddress}`); - } - - this.addresses.Bridgehub.BridgehubImplementation = contractAddress; - } - public async deployTransparentProxyAdmin(create2Salt: string, ethTxOptions: 
ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; if (this.verbose) { console.log("Deploying Proxy Admin"); } // Note: we cannot deploy using Create2, as the owner of the ProxyAdmin is msg.sender - const contractFactory = await hardhat.ethers.getContractFactory("ProxyAdmin", { - signer: this.deployWallet, - }); - - const proxyAdmin = await contractFactory.deploy(...[ethTxOptions]); - const rec = await proxyAdmin.deployTransaction.wait(); + let proxyAdmin; + let rec; + + if (this.isZkMode()) { + // @ts-ignore + // TODO try to make it work with zksync ethers + const zkWal = this.deployWallet as ZkWallet; + // FIXME: this is a hack + const tmpContractFactory = await hardhat.ethers.getContractFactory( + "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol:ProxyAdmin", + { + signer: this.deployWallet, + } + ); + const contractFactory = new ZkContractFactory(tmpContractFactory.interface, tmpContractFactory.bytecode, zkWal); + proxyAdmin = await contractFactory.deploy(...[ethTxOptions]); + rec = await proxyAdmin.deployTransaction.wait(); + } else { + ethTxOptions.gasLimit ??= 10_000_000; + const contractFactory = await hardhat.ethers.getContractFactory( + "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol:ProxyAdmin", + { + signer: this.deployWallet, + } + ); + proxyAdmin = await contractFactory.deploy(...[ethTxOptions]); + rec = await proxyAdmin.deployTransaction.wait(); + } if (this.verbose) { console.log( - `Proxy admin deployed, gasUsed: ${rec.gasUsed.toString()}, tx hash ${rec.transactionHash}, expected address: ${ - proxyAdmin.address - }` + `Proxy admin deployed, gasUsed: ${rec.gasUsed.toString()}, tx hash ${rec.transactionHash}, expected address: + ${proxyAdmin.address}` ); console.log(`CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR=${proxyAdmin.address}`); } @@ -244,22 +420,34 @@ export class Deployer { if (this.verbose) { console.log( - `ProxyAdmin ownership transferred to Governance in tx ${ - receipt.transactionHash - }, 
gas used: ${receipt.gasUsed.toString()}` + `ProxyAdmin ownership transferred to Governance in tx + ${receipt.transactionHash}, gas used: ${receipt.gasUsed.toString()}` ); } } - public async deployBridgehubProxy(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; + public async deployBridgehubImplementation(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + const contractAddress = await this.deployViaCreate2( + "Bridgehub", + [await this.getL1ChainId(), this.addresses.Governance, getNumberFromEnv("CONTRACTS_MAX_NUMBER_OF_ZK_CHAINS")], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + console.log(`CONTRACTS_BRIDGEHUB_IMPL_ADDR=${contractAddress}`); + } + + this.addresses.Bridgehub.BridgehubImplementation = contractAddress; + } + public async deployBridgehubProxy(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { const bridgehub = new Interface(hardhat.artifacts.readArtifactSync("Bridgehub").abi); - const initCalldata = bridgehub.encodeFunctionData("initialize", [this.ownerAddress]); + const initCalldata = bridgehub.encodeFunctionData("initialize", [this.addresses.Governance]); const contractAddress = await this.deployViaCreate2( - "TransparentUpgradeableProxy", + "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol:TransparentUpgradeableProxy", [this.addresses.Bridgehub.BridgehubImplementation, this.addresses.TransparentProxyAdmin, initCalldata], create2Salt, ethTxOptions @@ -272,16 +460,52 @@ export class Deployer { this.addresses.Bridgehub.BridgehubProxy = contractAddress; } - public async deployStateTransitionManagerImplementation( + public async deployMessageRootImplementation(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + const contractAddress = await this.deployViaCreate2( + "MessageRoot", + [this.addresses.Bridgehub.BridgehubProxy], + create2Salt, + ethTxOptions + ); + + if (this.verbose) 
{ + console.log(`CONTRACTS_MESSAGE_ROOT_IMPL_ADDR=${contractAddress}`); + } + + this.addresses.Bridgehub.MessageRootImplementation = contractAddress; + } + + public async deployMessageRootProxy(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + const messageRoot = new Interface(hardhat.artifacts.readArtifactSync("MessageRoot").abi); + + const initCalldata = messageRoot.encodeFunctionData("initialize"); + + const contractAddress = await this.deployViaCreate2( + "TransparentUpgradeableProxy", + [this.addresses.Bridgehub.MessageRootImplementation, this.addresses.TransparentProxyAdmin, initCalldata], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + console.log(`CONTRACTS_MESSAGE_ROOT_PROXY_ADDR=${contractAddress}`); + } + + this.addresses.Bridgehub.MessageRootProxy = contractAddress; + } + + public async deployChainTypeManagerImplementation( create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest ) { - ethTxOptions.gasLimit ??= 10_000_000; const contractAddress = await this.deployViaCreate2( - "StateTransitionManager", - [this.addresses.Bridgehub.BridgehubProxy, getNumberFromEnv("CONTRACTS_MAX_NUMBER_OF_HYPERCHAINS")], + "ChainTypeManager", + [this.addresses.Bridgehub.BridgehubProxy], create2Salt, - ethTxOptions + { + ...ethTxOptions, + gasLimit: 20_000_000, + } ); if (this.verbose) { @@ -291,29 +515,29 @@ export class Deployer { this.addresses.StateTransition.StateTransitionImplementation = contractAddress; } - public async deployStateTransitionManagerProxy( + public async deployChainTypeManagerProxy( create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest, extraFacets?: FacetCut[] ) { - ethTxOptions.gasLimit ??= 10_000_000; const genesisBatchHash = getHashFromEnv("CONTRACTS_GENESIS_ROOT"); // TODO: confusing name const genesisRollupLeafIndex = getNumberFromEnv("CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX"); const genesisBatchCommitment = getHashFromEnv("CONTRACTS_GENESIS_BATCH_COMMITMENT"); - const diamondCut 
= await this.initialZkSyncHyperchainDiamondCut(extraFacets); + const diamondCut = await this.initialZkSyncZKChainDiamondCut(extraFacets); const protocolVersion = packSemver(...unpackStringSemVer(process.env.CONTRACTS_GENESIS_PROTOCOL_SEMANTIC_VERSION)); - const stateTransitionManager = new Interface(hardhat.artifacts.readArtifactSync("StateTransitionManager").abi); - + const chainTypeManager = new Interface(hardhat.artifacts.readArtifactSync("ChainTypeManager").abi); + const forceDeploymentsData = await this.genesisForceDeploymentsData(); const chainCreationParams = { genesisUpgrade: this.addresses.StateTransition.GenesisUpgrade, genesisBatchHash, genesisIndexRepeatedStorageChanges: genesisRollupLeafIndex, genesisBatchCommitment, diamondCut, + forceDeploymentsData, }; - const initCalldata = stateTransitionManager.encodeFunctionData("initialize", [ + const initCalldata = chainTypeManager.encodeFunctionData("initialize", [ { owner: this.addresses.Governance, validatorTimelock: this.addresses.ValidatorTimeLock, @@ -323,7 +547,7 @@ export class Deployer { ]); const contractAddress = await this.deployViaCreate2( - "TransparentUpgradeableProxy", + "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol:TransparentUpgradeableProxy", [ this.addresses.StateTransition.StateTransitionImplementation, this.addresses.TransparentProxyAdmin, @@ -334,7 +558,7 @@ export class Deployer { ); if (this.verbose) { - console.log(`StateTransitionManagerProxy deployed, with protocol version: ${protocolVersion}`); + console.log(`ChainTypeManagerProxy deployed, with protocol version: ${protocolVersion}`); console.log(`CONTRACTS_STATE_TRANSITION_PROXY_ADDR=${contractAddress}`); } @@ -342,8 +566,12 @@ export class Deployer { } public async deployAdminFacet(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; - const contractAddress = await this.deployViaCreate2("AdminFacet", [], create2Salt, ethTxOptions); + 
const contractAddress = await this.deployViaCreate2( + "AdminFacet", + [await this.getL1ChainId()], + create2Salt, + ethTxOptions + ); if (this.verbose) { console.log(`CONTRACTS_ADMIN_FACET_ADDR=${contractAddress}`); @@ -353,9 +581,13 @@ export class Deployer { } public async deployMailboxFacet(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const eraChainId = getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"); - const contractAddress = await this.deployViaCreate2("MailboxFacet", [eraChainId], create2Salt, ethTxOptions); + const contractAddress = await this.deployViaCreate2( + "MailboxFacet", + [eraChainId, await this.getL1ChainId()], + create2Salt, + ethTxOptions + ); if (this.verbose) { console.log(`Mailbox deployed with era chain id: ${eraChainId}`); @@ -366,7 +598,6 @@ export class Deployer { } public async deployExecutorFacet(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const contractAddress = await this.deployViaCreate2("ExecutorFacet", [], create2Salt, ethTxOptions); if (this.verbose) { @@ -377,7 +608,6 @@ export class Deployer { } public async deployGettersFacet(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const contractAddress = await this.deployViaCreate2("GettersFacet", [], create2Salt, ethTxOptions); if (this.verbose) { @@ -388,8 +618,6 @@ export class Deployer { } public async deployVerifier(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; - let contractAddress: string; if (process.env.CHAIN_ETH_NETWORK === "mainnet") { @@ -410,10 +638,15 @@ export class Deployer { ethTxOptions: ethers.providers.TransactionRequest, dummy: boolean = false ) { - ethTxOptions.gasLimit ??= 10_000_000; + const eraChainId = getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"); const contractAddress = await this.deployViaCreate2( 
dummy ? "DummyL1ERC20Bridge" : "L1ERC20Bridge", - [this.addresses.Bridges.SharedBridgeProxy], + [ + this.addresses.Bridges.L1NullifierProxy, + this.addresses.Bridges.SharedBridgeProxy, + this.addresses.Bridges.NativeTokenVaultProxy, + eraChainId, + ], create2Salt, ethTxOptions ); @@ -426,24 +659,11 @@ export class Deployer { } public async setParametersSharedBridge() { - const sharedBridge = L1SharedBridgeFactory.connect(this.addresses.Bridges.SharedBridgeProxy, this.deployWallet); + const sharedBridge = L1AssetRouterFactory.connect(this.addresses.Bridges.SharedBridgeProxy, this.deployWallet); const data1 = sharedBridge.interface.encodeFunctionData("setL1Erc20Bridge", [ this.addresses.Bridges.ERC20BridgeProxy, ]); - const data2 = sharedBridge.interface.encodeFunctionData("setEraPostDiamondUpgradeFirstBatch", [ - process.env.CONTRACTS_ERA_POST_DIAMOND_UPGRADE_FIRST_BATCH ?? 1, - ]); - const data3 = sharedBridge.interface.encodeFunctionData("setEraPostLegacyBridgeUpgradeFirstBatch", [ - process.env.CONTRACTS_ERA_POST_LEGACY_BRIDGE_UPGRADE_FIRST_BATCH ?? 1, - ]); - const data4 = sharedBridge.interface.encodeFunctionData("setEraLegacyBridgeLastDepositTime", [ - process.env.CONTRACTS_ERA_LEGACY_UPGRADE_LAST_DEPOSIT_BATCH ?? 1, - process.env.CONTRACTS_ERA_LEGACY_UPGRADE_LAST_DEPOSIT_TX_NUMBER ?? 
0, - ]); await this.executeUpgrade(this.addresses.Bridges.SharedBridgeProxy, 0, data1); - await this.executeUpgrade(this.addresses.Bridges.SharedBridgeProxy, 0, data2); - await this.executeUpgrade(this.addresses.Bridges.SharedBridgeProxy, 0, data3); - await this.executeUpgrade(this.addresses.Bridges.SharedBridgeProxy, 0, data4); if (this.verbose) { console.log("Shared bridge updated with ERC20Bridge address"); } @@ -456,14 +676,16 @@ export class Deployer { // eslint-disable-next-line @typescript-eslint/no-explicit-any fargs: any[], value: BigNumberish, - overrides?: Overrides, + overrides?: ethers.providers.TransactionRequest, printOperation: boolean = false ): Promise { if (useGovernance) { const cdata = contract.interface.encodeFunctionData(fname, fargs); - return this.executeUpgrade(contract.address, value, cdata, printOperation); + return this.executeUpgrade(contract.address, value, cdata, overrides, printOperation); } else { - const tx: ethers.ContractTransaction = await contract[fname](...fargs, ...(overrides ? 
[overrides] : [])); + overrides = overrides || {}; + overrides.value = value; + const tx: ethers.ContractTransaction = await contract[fname](...fargs, overrides); return await tx.wait(); } } @@ -473,6 +695,7 @@ export class Deployer { targetAddress: string, value: BigNumberish, callData: string, + ethTxOptions?: ethers.providers.TransactionRequest, printOperation: boolean = false ) { const governance = IGovernanceFactory.connect(this.addresses.Governance, this.deployWallet); @@ -498,7 +721,7 @@ export class Deployer { if (this.verbose) { console.log("Upgrade scheduled"); } - const executeTX = await governance.execute(operation, { value: value }); + const executeTX = await governance.execute(operation, { ...ethTxOptions, value: value }); const receipt = await executeTX.wait(); if (this.verbose) { console.log( @@ -511,15 +734,68 @@ export class Deployer { return receipt; } + /// this should be only use for local testing + public async executeUpgradeOnL2( + chainId: string, + targetAddress: string, + gasPrice: BigNumberish, + callData: string, + l2GasLimit: BigNumberish, + ethTxOptions?: ethers.providers.TransactionRequest, + printOperation: boolean = false + ) { + const bridgehub = this.bridgehubContract(this.deployWallet); + const value = await bridgehub.l2TransactionBaseCost( + chainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + const baseTokenAddress = await bridgehub.baseToken(chainId); + const ethIsBaseToken = baseTokenAddress == ADDRESS_ONE; + if (!ethIsBaseToken) { + const baseToken = TestnetERC20TokenFactory.connect(baseTokenAddress, this.deployWallet); + await (await baseToken.transfer(this.addresses.Governance, value)).wait(); + await this.executeUpgrade( + baseTokenAddress, + 0, + baseToken.interface.encodeFunctionData("approve", [this.addresses.Bridges.SharedBridgeProxy, value]) + ); + } + const l1Calldata = bridgehub.interface.encodeFunctionData("requestL2TransactionDirect", [ + { + chainId, + l2Contract: targetAddress, + 
mintValue: value, + l2Value: 0, + l2Calldata: callData, + l2GasLimit: l2GasLimit, + l2GasPerPubdataByteLimit: SYSTEM_CONFIG.requiredL2GasPricePerPubdata, + factoryDeps: [], + refundRecipient: this.deployWallet.address, + }, + ]); + const receipt = await this.executeUpgrade( + this.addresses.Bridgehub.BridgehubProxy, + ethIsBaseToken ? value : 0, + l1Calldata, + { + ...ethTxOptions, + gasPrice, + }, + printOperation + ); + return receipt; + } + // used for testing, mimics original deployment process. // we don't use the real implementation, as we need the address to be independent public async deployERC20BridgeProxy(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const initCalldata = new Interface(hardhat.artifacts.readArtifactSync("L1ERC20Bridge").abi).encodeFunctionData( "initialize" ); const contractAddress = await this.deployViaCreate2( - "TransparentUpgradeableProxy", + "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol:TransparentUpgradeableProxy", [this.addresses.Bridges.ERC20BridgeImplementation, this.addresses.TransparentProxyAdmin, initCalldata], create2Salt, ethTxOptions @@ -531,18 +807,62 @@ export class Deployer { this.addresses.Bridges.ERC20BridgeProxy = contractAddress; } + public async deployL1NullifierImplementation(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + // const tokens = getTokens(); + // const l1WethToken = tokens.find((token: { symbol: string }) => token.symbol == "WETH")!.address; + const eraChainId = getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"); + const eraDiamondProxy = getAddressFromEnv("CONTRACTS_ERA_DIAMOND_PROXY_ADDR"); + const contractName = isCurrentNetworkLocal() ? 
"L1NullifierDev" : "L1Nullifier"; + const contractAddress = await this.deployViaCreate2( + contractName, + [this.addresses.Bridgehub.BridgehubProxy, eraChainId, eraDiamondProxy], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + console.log(`CONTRACTS_L1_NULLIFIER_IMPL_ADDR=${contractAddress}`); + } + + this.addresses.Bridges.L1NullifierImplementation = contractAddress; + } + + public async deployL1NullifierProxy(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + const initCalldata = new Interface(hardhat.artifacts.readArtifactSync("L1Nullifier").abi).encodeFunctionData( + "initialize", + [this.addresses.Governance, 1, 1, 1, 0] + ); + const contractAddress = await this.deployViaCreate2( + "TransparentUpgradeableProxy", + [this.addresses.Bridges.L1NullifierImplementation, this.addresses.TransparentProxyAdmin, initCalldata], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + console.log(`CONTRACTS_L1_NULLIFIER_PROXY_ADDR=${contractAddress}`); + } + + this.addresses.Bridges.L1NullifierProxy = contractAddress; + } + public async deploySharedBridgeImplementation( create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest ) { - ethTxOptions.gasLimit ??= 10_000_000; const tokens = getTokens(); const l1WethToken = tokens.find((token: { symbol: string }) => token.symbol == "WETH")!.address; const eraChainId = getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"); const eraDiamondProxy = getAddressFromEnv("CONTRACTS_ERA_DIAMOND_PROXY_ADDR"); const contractAddress = await this.deployViaCreate2( - "L1SharedBridge", - [l1WethToken, this.addresses.Bridgehub.BridgehubProxy, eraChainId, eraDiamondProxy], + "L1AssetRouter", + [ + l1WethToken, + this.addresses.Bridgehub.BridgehubProxy, + this.addresses.Bridges.L1NullifierProxy, + eraChainId, + eraDiamondProxy, + ], create2Salt, ethTxOptions ); @@ -556,13 +876,12 @@ export class Deployer { } public async deploySharedBridgeProxy(create2Salt: string, ethTxOptions: 
ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; - const initCalldata = new Interface(hardhat.artifacts.readArtifactSync("L1SharedBridge").abi).encodeFunctionData( + const initCalldata = new Interface(hardhat.artifacts.readArtifactSync("L1AssetRouter").abi).encodeFunctionData( "initialize", [this.addresses.Governance] ); const contractAddress = await this.deployViaCreate2( - "TransparentUpgradeableProxy", + "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol:TransparentUpgradeableProxy", [this.addresses.Bridges.SharedBridgeImplementation, this.addresses.TransparentProxyAdmin, initCalldata], create2Salt, ethTxOptions @@ -575,9 +894,164 @@ export class Deployer { this.addresses.Bridges.SharedBridgeProxy = contractAddress; } - public async sharedBridgeSetEraPostUpgradeFirstBatch(ethTxOptions: ethers.providers.TransactionRequest) { + public async deployBridgedStandardERC20Implementation( + create2Salt: string, + ethTxOptions: ethers.providers.TransactionRequest + ) { + const contractAddress = await this.deployViaCreate2("BridgedStandardERC20", [], create2Salt, ethTxOptions); + + if (this.verbose) { + // console.log(`With era chain id ${eraChainId}`); + console.log(`CONTRACTS_L1_BRIDGED_STANDARD_ERC20_IMPL_ADDR=${contractAddress}`); + } + + this.addresses.Bridges.BridgedStandardERC20Implementation = contractAddress; + } + + public async deployBridgedTokenBeacon(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + /// Note we cannot use create2 as the deployer is the owner. 
ethTxOptions.gasLimit ??= 10_000_000; - const sharedBridge = L1SharedBridgeFactory.connect(this.addresses.Bridges.SharedBridgeProxy, this.deployWallet); + const contractFactory = await hardhat.ethers.getContractFactory( + "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol:UpgradeableBeacon", + { + signer: this.deployWallet, + } + ); + const beacon = await contractFactory.deploy( + ...[this.addresses.Bridges.BridgedStandardERC20Implementation, ethTxOptions] + ); + const rec = await beacon.deployTransaction.wait(); + + if (this.verbose) { + console.log("Beacon deployed with tx hash", rec.transactionHash); + console.log(`CONTRACTS_L1_BRIDGED_TOKEN_BEACON_ADDR=${beacon.address}`); + } + + this.addresses.Bridges.BridgedTokenBeacon = beacon.address; + + await beacon.transferOwnership(this.addresses.Governance); + } + + public async deployNativeTokenVaultImplementation( + create2Salt: string, + ethTxOptions: ethers.providers.TransactionRequest + ) { + const eraChainId = getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"); + const tokens = getTokens(); + const l1WethToken = tokens.find((token: { symbol: string }) => token.symbol == "WETH")!.address; + const contractAddress = await this.deployViaCreate2( + "L1NativeTokenVault", + [l1WethToken, this.addresses.Bridges.SharedBridgeProxy, eraChainId, this.addresses.Bridges.L1NullifierProxy], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + // console.log(`With era chain id ${eraChainId}`); + console.log(`CONTRACTS_L1_NATIVE_TOKEN_VAULT_IMPL_ADDR=${contractAddress}`); + } + + this.addresses.Bridges.NativeTokenVaultImplementation = contractAddress; + } + + public async deployNativeTokenVaultProxy(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + const initCalldata = new Interface(hardhat.artifacts.readArtifactSync("L1NativeTokenVault").abi).encodeFunctionData( + "initialize", + [this.addresses.Governance, this.addresses.Bridges.BridgedTokenBeacon] + ); + const contractAddress = await 
this.deployViaCreate2( + "TransparentUpgradeableProxy", + [this.addresses.Bridges.NativeTokenVaultImplementation, this.addresses.TransparentProxyAdmin, initCalldata], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + console.log(`CONTRACTS_L1_NATIVE_TOKEN_VAULT_PROXY_ADDR=${contractAddress}`); + } + + this.addresses.Bridges.NativeTokenVaultProxy = contractAddress; + + const nullifier = this.l1NullifierContract(this.deployWallet); + const assetRouter = this.defaultSharedBridge(this.deployWallet); + const ntv = this.nativeTokenVault(this.deployWallet); + + const data = await assetRouter.interface.encodeFunctionData("setNativeTokenVault", [ + this.addresses.Bridges.NativeTokenVaultProxy, + ]); + await this.executeUpgrade(this.addresses.Bridges.SharedBridgeProxy, 0, data); + if (this.verbose) { + console.log("Native token vault set in shared bridge"); + } + + const data2 = await nullifier.interface.encodeFunctionData("setL1NativeTokenVault", [ + this.addresses.Bridges.NativeTokenVaultProxy, + ]); + await this.executeUpgrade(this.addresses.Bridges.L1NullifierProxy, 0, data2); + if (this.verbose) { + console.log("Native token vault set in nullifier"); + } + + const data3 = await nullifier.interface.encodeFunctionData("setL1AssetRouter", [ + this.addresses.Bridges.SharedBridgeProxy, + ]); + await this.executeUpgrade(this.addresses.Bridges.L1NullifierProxy, 0, data3); + if (this.verbose) { + console.log("Asset router set in nullifier"); + } + + await (await this.nativeTokenVault(this.deployWallet).registerEthToken()).wait(); + + await ntv.registerEthToken(); + } + + public async deployCTMDeploymentTrackerImplementation( + create2Salt: string, + ethTxOptions: ethers.providers.TransactionRequest + ) { + const contractAddress = await this.deployViaCreate2( + "CTMDeploymentTracker", + [this.addresses.Bridgehub.BridgehubProxy, this.addresses.Bridges.SharedBridgeProxy], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + 
console.log(`CONTRACTS_CTM_DEPLOYMENT_TRACKER_IMPL_ADDR=${contractAddress}`); + } + + this.addresses.Bridgehub.CTMDeploymentTrackerImplementation = contractAddress; + } + + public async deployCTMDeploymentTrackerProxy(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + const initCalldata = new Interface( + hardhat.artifacts.readArtifactSync("CTMDeploymentTracker").abi + ).encodeFunctionData("initialize", [this.addresses.Governance]); + const contractAddress = await this.deployViaCreate2( + "TransparentUpgradeableProxy", + [this.addresses.Bridgehub.CTMDeploymentTrackerImplementation, this.addresses.TransparentProxyAdmin, initCalldata], + create2Salt, + ethTxOptions + ); + + if (this.verbose) { + console.log(`CONTRACTS_CTM_DEPLOYMENT_TRACKER_PROXY_ADDR=${contractAddress}`); + } + + this.addresses.Bridgehub.CTMDeploymentTrackerProxy = contractAddress; + + // const bridgehub = this.bridgehubContract(this.deployWallet); + // const data0 = bridgehub.interface.encodeFunctionData("setCTMDeployer", [ + // this.addresses.Bridgehub.CTMDeploymentTrackerProxy, + // ]); + // await this.executeUpgrade(this.addresses.Bridgehub.BridgehubProxy, 0, data0); + // if (this.verbose) { + // console.log("CTM DT registered in Bridgehub"); + // } + } + + public async sharedBridgeSetEraPostUpgradeFirstBatch() { + const sharedBridge = L1AssetRouterFactory.connect(this.addresses.Bridges.SharedBridgeProxy, this.deployWallet); const storageSwitch = getNumberFromEnv("CONTRACTS_SHARED_BRIDGE_UPGRADE_STORAGE_SWITCH"); const tx = await sharedBridge.setEraPostUpgradeFirstBatch(storageSwitch); const receipt = await tx.wait(); @@ -586,20 +1060,43 @@ export class Deployer { } } - public async registerSharedBridge(ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; + public async registerAddresses() { const bridgehub = this.bridgehubContract(this.deployWallet); - /// registering ETH as a valid token, with address 1. 
- const tx2 = await bridgehub.addToken(ADDRESS_ONE); - const receipt2 = await tx2.wait(); + const upgradeData1 = await bridgehub.interface.encodeFunctionData("setAddresses", [ + this.addresses.Bridges.SharedBridgeProxy, + this.addresses.Bridgehub.CTMDeploymentTrackerProxy, + this.addresses.Bridgehub.MessageRootProxy, + ]); + await this.executeUpgrade(this.addresses.Bridgehub.BridgehubProxy, 0, upgradeData1); + if (this.verbose) { + console.log("Shared bridge was registered in Bridgehub"); + } + } + + public async registerTokenBridgehub(tokenAddress: string, useGovernance: boolean = false) { + const bridgehub = this.bridgehubContract(this.deployWallet); + const baseTokenAssetId = encodeNTVAssetId(this.l1ChainId, tokenAddress); + const receipt = await this.executeDirectOrGovernance( + useGovernance, + bridgehub, + "addTokenAssetId", + [baseTokenAssetId], + 0 + ); - const tx3 = await bridgehub.setSharedBridge(this.addresses.Bridges.SharedBridgeProxy); - const receipt3 = await tx3.wait(); if (this.verbose) { - console.log( - `Shared bridge was registered, gas used: ${receipt3.gasUsed.toString()} and ${receipt2.gasUsed.toString()}` - ); + console.log(`Token ${tokenAddress} was registered, gas used: ${receipt.gasUsed.toString()}`); + } + } + + public async registerTokenInNativeTokenVault(token: string) { + const nativeTokenVault = this.nativeTokenVault(this.deployWallet); + + const data = nativeTokenVault.interface.encodeFunctionData("registerToken", [token]); + await this.executeUpgrade(this.addresses.Bridges.NativeTokenVaultProxy, 0, data); + if (this.verbose) { + console.log("Native token vault registered with token", token); } } @@ -607,7 +1104,6 @@ export class Deployer { create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest ) { - ethTxOptions.gasLimit ??= 10_000_000; const contractAddress = await this.deployViaCreate2("DiamondInit", [], create2Salt, ethTxOptions); if (this.verbose) { @@ -618,7 +1114,6 @@ export class Deployer { } public async 
deployDefaultUpgrade(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const contractAddress = await this.deployViaCreate2("DefaultUpgrade", [], create2Salt, ethTxOptions); if (this.verbose) { @@ -628,20 +1123,18 @@ export class Deployer { this.addresses.StateTransition.DefaultUpgrade = contractAddress; } - public async deployHyperchainsUpgrade(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; - const contractAddress = await this.deployViaCreate2("UpgradeHyperchains", [], create2Salt, ethTxOptions); + public async deployZKChainsUpgrade(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + const contractAddress = await this.deployViaCreate2("UpgradeZKChains", [], create2Salt, ethTxOptions); if (this.verbose) { - console.log(`CONTRACTS_HYPERCHAIN_UPGRADE_ADDR=${contractAddress}`); + console.log(`CONTRACTS_ZK_CHAIN_UPGRADE_ADDR=${contractAddress}`); } this.addresses.StateTransition.DefaultUpgrade = contractAddress; } public async deployGenesisUpgrade(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; - const contractAddress = await this.deployViaCreate2("GenesisUpgrade", [], create2Salt, ethTxOptions); + const contractAddress = await this.deployViaCreate2("L1GenesisUpgrade", [], create2Salt, ethTxOptions); if (this.verbose) { console.log(`CONTRACTS_GENESIS_UPGRADE_ADDR=${contractAddress}`); @@ -655,20 +1148,21 @@ export class Deployer { await this.deployBridgehubImplementation(create2Salt, { gasPrice, nonce }); await this.deployBridgehubProxy(create2Salt, { gasPrice }); + await this.deployMessageRootImplementation(create2Salt, { gasPrice }); + await this.deployMessageRootProxy(create2Salt, { gasPrice }); } - public async deployStateTransitionManagerContract( + public async deployChainTypeManagerContract( create2Salt: string, extraFacets?: FacetCut[], 
gasPrice?: BigNumberish, nonce? ) { nonce = nonce ? parseInt(nonce) : await this.deployWallet.getTransactionCount(); - await this.deployStateTransitionDiamondFacets(create2Salt, gasPrice, nonce); - await this.deployStateTransitionManagerImplementation(create2Salt, { gasPrice }); - await this.deployStateTransitionManagerProxy(create2Salt, { gasPrice }, extraFacets); - await this.registerStateTransitionManager(); + await this.deployChainTypeManagerImplementation(create2Salt, { gasPrice }); + await this.deployChainTypeManagerProxy(create2Salt, { gasPrice }, extraFacets); + await this.registerChainTypeManager(); } public async deployStateTransitionDiamondFacets(create2Salt: string, gasPrice?: BigNumberish, nonce?) { @@ -681,40 +1175,206 @@ export class Deployer { await this.deployStateTransitionDiamondInit(create2Salt, { gasPrice, nonce: nonce + 4 }); } - public async registerStateTransitionManager() { + public async registerChainTypeManager() { const bridgehub = this.bridgehubContract(this.deployWallet); - if (!(await bridgehub.stateTransitionManagerIsRegistered(this.addresses.StateTransition.StateTransitionProxy))) { - const tx = await bridgehub.addStateTransitionManager(this.addresses.StateTransition.StateTransitionProxy); - - const receipt = await tx.wait(); - if (this.verbose) { - console.log(`StateTransition System registered, gas used: ${receipt.gasUsed.toString()}`); + if (!(await bridgehub.chainTypeManagerIsRegistered(this.addresses.StateTransition.StateTransitionProxy))) { + const upgradeData = bridgehub.interface.encodeFunctionData("addChainTypeManager", [ + this.addresses.StateTransition.StateTransitionProxy, + ]); + + let receipt1; + if (!this.isZkMode()) { + receipt1 = await this.executeUpgrade(this.addresses.Bridgehub.BridgehubProxy, 0, upgradeData); + if (this.verbose) { + console.log(`StateTransition System registered, gas used: ${receipt1.gasUsed.toString()}`); + } + + const ctmDeploymentTracker = this.ctmDeploymentTracker(this.deployWallet); + + 
const l1AssetRouter = this.defaultSharedBridge(this.deployWallet); + const whitelistData = l1AssetRouter.interface.encodeFunctionData("setAssetDeploymentTracker", [ + ethers.utils.hexZeroPad(this.addresses.StateTransition.StateTransitionProxy, 32), + ctmDeploymentTracker.address, + ]); + const receipt2 = await this.executeUpgrade(l1AssetRouter.address, 0, whitelistData); + if (this.verbose) { + console.log("CTM deployment tracker whitelisted in L1 Shared Bridge", receipt2.gasUsed.toString()); + console.log( + `CONTRACTS_CTM_ASSET_INFO=${await bridgehub.ctmAssetId(this.addresses.StateTransition.StateTransitionProxy)}` + ); + } + + const data1 = ctmDeploymentTracker.interface.encodeFunctionData("registerCTMAssetOnL1", [ + this.addresses.StateTransition.StateTransitionProxy, + ]); + const receipt3 = await this.executeUpgrade(this.addresses.Bridgehub.CTMDeploymentTrackerProxy, 0, data1); + if (this.verbose) { + console.log( + "CTM asset registered in L1 Shared Bridge via CTM Deployment Tracker", + receipt3.gasUsed.toString() + ); + console.log( + `CONTRACTS_CTM_ASSET_INFO=${await bridgehub.ctmAssetId(this.addresses.StateTransition.StateTransitionProxy)}` + ); + } + } else { + console.log(`CONTRACTS_CTM_ASSET_INFO=${getHashFromEnv("CONTRACTS_CTM_ASSET_INFO")}`); } } } - public async registerHyperchain( - baseTokenAddress: string, + public async registerSettlementLayer() { + const bridgehub = this.bridgehubContract(this.deployWallet); + const calldata = bridgehub.interface.encodeFunctionData("registerSettlementLayer", [this.chainId, true]); + await this.executeUpgrade(this.addresses.Bridgehub.BridgehubProxy, 0, calldata); + if (this.verbose) { + console.log("Gateway registered"); + } + } + + // Main function to move the current chain (that is hooked to l1), on top of the syncLayer chain. 
+ public async moveChainToGateway(gatewayChainId: string, gasPrice: BigNumberish) { + const protocolVersion = packSemver(...unpackStringSemVer(process.env.CONTRACTS_GENESIS_PROTOCOL_SEMANTIC_VERSION)); + const chainData = ethers.utils.defaultAbiCoder.encode(["uint256"], [protocolVersion]); + const bridgehub = this.bridgehubContract(this.deployWallet); + // Just some large gas limit that should always be enough + const l2GasLimit = ethers.BigNumber.from(72_000_000); + const expectedCost = ( + await bridgehub.l2TransactionBaseCost(gatewayChainId, gasPrice, l2GasLimit, REQUIRED_L2_GAS_PRICE_PER_PUBDATA) + ).mul(5); + + // We are creating the new DiamondProxy for our chain, to be deployed on top of sync Layer. + const newAdmin = this.deployWallet.address; + const diamondCutData = await this.initialZkSyncZKChainDiamondCut(); + const initialDiamondCut = new ethers.utils.AbiCoder().encode([DIAMOND_CUT_DATA_ABI_STRING], [diamondCutData]); + + const ctmData = new ethers.utils.AbiCoder().encode(["uint256", "bytes"], [newAdmin, initialDiamondCut]); + const bridgehubData = new ethers.utils.AbiCoder().encode( + [BRIDGEHUB_CTM_ASSET_DATA_ABI_STRING], + [[this.chainId, ctmData, chainData]] + ); + + // console.log("bridgehubData", bridgehubData) + // console.log("this.addresses.ChainAssetInfo", this.addresses.ChainAssetInfo) + + // The ctmAssetIFromChainId gives us a unique 'asset' identifier for a given chain. + const chainAssetId = await bridgehub.ctmAssetIdFromChainId(this.chainId); + if (this.verbose) { + console.log("Chain asset id is: ", chainAssetId); + console.log(`CONTRACTS_CTM_ASSET_INFO=${chainAssetId}`); + } + + let sharedBridgeData = ethers.utils.defaultAbiCoder.encode( + ["bytes32", "bytes"], + + [chainAssetId, bridgehubData] + ); + // The 0x01 is the encoding for the L1AssetRouter. 
+ sharedBridgeData = "0x01" + sharedBridgeData.slice(2); + + // And now we 'transfer' the chain through the bridge (it behaves like a 'regular' asset, where we 'freeze' it in L1 + // and then create on SyncLayer). You can see these methods in Admin.sol (part of DiamondProxy). + const receipt = await this.executeChainAdminMulticall([ + { + target: bridgehub.address, + data: bridgehub.interface.encodeFunctionData("requestL2TransactionTwoBridges", [ + // These arguments must match L2TransactionRequestTwoBridgesOuter struct. + { + chainId: gatewayChainId, + mintValue: expectedCost, + l2Value: 0, + l2GasLimit: l2GasLimit, + l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + refundRecipient: await this.deployWallet.getAddress(), + secondBridgeAddress: this.addresses.Bridges.SharedBridgeProxy, + secondBridgeValue: 0, + secondBridgeCalldata: sharedBridgeData, + }, + ]), + value: expectedCost, + }, + ]); + + return receipt; + } + + public async finishMoveChainToL1(synclayerChainId: number) { + const nullifier = this.l1NullifierContract(this.deployWallet); + // const baseTokenAmount = ethers.utils.parseEther("1"); + // const chainData = new ethers.utils.AbiCoder().encode(["uint256", "bytes"], [ADDRESS_ONE, "0x"]); // todo + // const bridgehubData = new ethers.utils.AbiCoder().encode(["uint256", "bytes"], [this.chainId, chainData]); + // console.log("bridgehubData", bridgehubData) + // console.log("this.addresses.ChainAssetInfo", this.addresses.ChainAssetInfo) + // const sharedBridgeData = ethers.utils.defaultAbiCoder.encode( + // ["bytes32", "bytes"], + + // [await bridgehub.ctmAssetInfoFromChainId(this.chainId), bridgehubData] + // ); + const l2BatchNumber = 1; + const l2MsgIndex = 1; + const l2TxNumberInBatch = 1; + const message = ethers.utils.defaultAbiCoder.encode(["bytes32", "bytes"], []); + const merkleProof = ["0x00"]; + const tx = await nullifier.finalizeWithdrawal( + synclayerChainId, + l2BatchNumber, + l2MsgIndex, + l2TxNumberInBatch, + message, + 
merkleProof + ); + const receipt = await tx.wait(); + if (this.verbose) { + console.log("Chain move to L1 finished", receipt.gasUsed.toString()); + } + } + + public async registerZKChain( + baseTokenAssetId: string, validiumMode: boolean, extraFacets?: FacetCut[], gasPrice?: BigNumberish, + compareDiamondCutHash: boolean = false, nonce?, predefinedChainId?: string, - useGovernance: boolean = false + useGovernance: boolean = false, + l2LegacySharedBridge: boolean = false ) { - const gasLimit = 10_000_000; + const txOptions = this.isZkMode() ? {} : { gasLimit: 10_000_000 }; nonce = nonce ? parseInt(nonce) : await this.deployWallet.getTransactionCount(); const bridgehub = this.bridgehubContract(this.deployWallet); - const stateTransitionManager = this.stateTransitionManagerContract(this.deployWallet); + const chainTypeManager = this.chainTypeManagerContract(this.deployWallet); + const ntv = this.nativeTokenVault(this.deployWallet); + const baseTokenAddress = await ntv.tokenAddress(baseTokenAssetId); const inputChainId = predefinedChainId || getNumberFromEnv("CHAIN_ETH_ZKSYNC_NETWORK_ID"); + const alreadyRegisteredInCTM = (await chainTypeManager.getZKChain(inputChainId)) != ethers.constants.AddressZero; + + if (l2LegacySharedBridge) { + if (this.verbose) { + console.log("Setting L2 legacy shared bridge in L1Nullifier"); + } + await this.setL2LegacySharedBridgeInL1Nullifier(inputChainId); + nonce++; + } + const admin = process.env.CHAIN_ADMIN_ADDRESS || this.ownerAddress; - const diamondCutData = await this.initialZkSyncHyperchainDiamondCut(extraFacets); + const diamondCutData = await this.initialZkSyncZKChainDiamondCut(extraFacets, compareDiamondCutHash); const initialDiamondCut = new ethers.utils.AbiCoder().encode([DIAMOND_CUT_DATA_ABI_STRING], [diamondCutData]); - + const forceDeploymentsData = await this.genesisForceDeploymentsData(); + const initData = ethers.utils.defaultAbiCoder.encode(["bytes", "bytes"], [initialDiamondCut, forceDeploymentsData]); + let 
factoryDeps = []; + if (process.env.CHAIN_ETH_NETWORK != "hardhat") { + factoryDeps = [ + L2_STANDARD_ERC20_PROXY_FACTORY.bytecode, + L2_STANDARD_ERC20_IMPLEMENTATION.bytecode, + L2_STANDARD_TOKEN_PROXY.bytecode, + ]; + } + // note the factory deps are provided at genesis const receipt = await this.executeDirectOrGovernance( useGovernance, bridgehub, @@ -722,19 +1382,18 @@ export class Deployer { [ inputChainId, this.addresses.StateTransition.StateTransitionProxy, - baseTokenAddress, + baseTokenAssetId, Date.now(), admin, - initialDiamondCut, + initData, + factoryDeps, ], 0, { gasPrice, - nonce, - gasLimit, + ...txOptions, } ); - const chainId = receipt.logs.find((log) => log.topics[0] == bridgehub.interface.getEventTopic("NewChain")) .topics[1]; @@ -745,27 +1404,32 @@ export class Deployer { } this.addresses.BaseToken = baseTokenAddress; + this.addresses.BaseTokenAssetId = baseTokenAssetId; if (this.verbose) { - console.log(`Hyperchain registered, gas used: ${receipt.gasUsed.toString()} and ${receipt.gasUsed.toString()}`); - console.log(`Hyperchain registration tx hash: ${receipt.transactionHash}`); + console.log(`ZK chain registered, gas used: ${receipt.gasUsed.toString()} and ${receipt.gasUsed.toString()}`); + console.log(`ZK chain registration tx hash: ${receipt.transactionHash}`); console.log(`CHAIN_ETH_ZKSYNC_NETWORK_ID=${parseInt(chainId, 16)}`); - + console.log( + `CONTRACTS_CTM_ASSET_INFO=${await bridgehub.ctmAssetId(this.addresses.StateTransition.StateTransitionProxy)}` + ); console.log(`CONTRACTS_BASE_TOKEN_ADDR=${baseTokenAddress}`); } - if (!predefinedChainId) { + + if (!alreadyRegisteredInCTM) { const diamondProxyAddress = "0x" + receipt.logs - .find((log) => log.topics[0] == stateTransitionManager.interface.getEventTopic("NewHyperchain")) + .find((log) => log.topics[0] == chainTypeManager.interface.getEventTopic("NewZKChain")) .topics[2].slice(26); this.addresses.StateTransition.DiamondProxy = diamondProxyAddress; if (this.verbose) { 
console.log(`CONTRACTS_DIAMOND_PROXY_ADDR=${diamondProxyAddress}`); } } - this.chainId = parseInt(chainId, 16); + const intChainId = parseInt(chainId, 16); + this.chainId = intChainId; const validatorOneAddress = getAddressFromEnv("ETH_SENDER_SENDER_OPERATOR_COMMIT_ETH_ADDR"); const validatorTwoAddress = getAddressFromEnv("ETH_SENDER_SENDER_OPERATOR_BLOBS_ETH_ADDR"); @@ -773,14 +1437,13 @@ export class Deployer { const txRegisterValidator = await validatorTimelock.addValidator(chainId, validatorOneAddress, { gasPrice, nonce, - gasLimit, + ...txOptions, }); const receiptRegisterValidator = await txRegisterValidator.wait(); if (this.verbose) { console.log( - `Validator registered, gas used: ${receiptRegisterValidator.gasUsed.toString()}, tx hash: ${ - txRegisterValidator.hash - }` + `Validator registered, gas used: ${receiptRegisterValidator.gasUsed.toString()}, tx hash: + ${txRegisterValidator.hash}` ); } @@ -789,7 +1452,7 @@ export class Deployer { const tx3 = await validatorTimelock.addValidator(chainId, validatorTwoAddress, { gasPrice, nonce, - gasLimit, + ...txOptions, }); const receipt3 = await tx3.wait(); if (this.verbose) { @@ -797,21 +1460,200 @@ export class Deployer { } const diamondProxy = this.stateTransitionContract(this.deployWallet); - const tx4 = await diamondProxy.setTokenMultiplier(1, 1); - const receipt4 = await tx4.wait(); - if (this.verbose) { - console.log(`BaseTokenMultiplier set, gas used: ${receipt4.gasUsed.toString()}`); + // if we are using governance, the deployer will not be the admin, so we can't call the diamond proxy directly + if (admin == this.deployWallet.address) { + const tx4 = await diamondProxy.setTokenMultiplier(1, 1); + const receipt4 = await tx4.wait(); + if (this.verbose) { + console.log(`BaseTokenMultiplier set, gas used: ${receipt4.gasUsed.toString()}`); + } + + if (validiumMode) { + const tx5 = await diamondProxy.setPubdataPricingMode(PubdataPricingMode.Validium); + const receipt5 = await tx5.wait(); + if (this.verbose) 
{ + console.log(`Validium mode set, gas used: ${receipt5.gasUsed.toString()}`); + } + } + } else { + console.warn( + "BaseTokenMultiplier and Validium mode can't be set through the governance, please set it separately, using the admin account" + ); } - if (validiumMode) { - const tx5 = await diamondProxy.setPubdataPricingMode(PubdataPricingMode.Validium); - const receipt5 = await tx5.wait(); + if (l2LegacySharedBridge) { + await this.deployL2LegacySharedBridge(inputChainId, gasPrice); + } + } + + public async setL2LegacySharedBridgeInL1Nullifier(inputChainId: string) { + const l1Nullifier = L1NullifierDevFactory.connect(this.addresses.Bridges.L1NullifierProxy, this.deployWallet); + const l1SharedBridge = this.defaultSharedBridge(this.deployWallet); + + if (isCurrentNetworkLocal()) { + const l2SharedBridgeImplementationBytecode = L2_SHARED_BRIDGE_IMPLEMENTATION.bytecode; + + const l2SharedBridgeImplAddress = computeL2Create2Address( + this.deployWallet.address, + l2SharedBridgeImplementationBytecode, + "0x", + ethers.constants.HashZero + ); + + const l2GovernorAddress = applyL1ToL2Alias(this.addresses.Governance); + + const l2SharedBridgeInterface = new Interface(L2_SHARED_BRIDGE_IMPLEMENTATION.abi); + const proxyInitializationParams = l2SharedBridgeInterface.encodeFunctionData("initialize", [ + l1SharedBridge.address, + hashL2Bytecode(L2_STANDARD_TOKEN_PROXY.bytecode), + l2GovernorAddress, + ]); + + const l2SharedBridgeProxyConstructorData = ethers.utils.arrayify( + new ethers.utils.AbiCoder().encode( + ["address", "address", "bytes"], + [l2SharedBridgeImplAddress, l2GovernorAddress, proxyInitializationParams] + ) + ); + + /// compute L2SharedBridgeProxy address + const l2SharedBridgeProxyAddress = computeL2Create2Address( + this.deployWallet.address, + L2_SHARED_BRIDGE_PROXY.bytecode, + l2SharedBridgeProxyConstructorData, + ethers.constants.HashZero + ); + + const tx = await l1Nullifier.setL2LegacySharedBridge(inputChainId, l2SharedBridgeProxyAddress); + const 
receipt8 = await tx.wait(); if (this.verbose) { - console.log(`Validium mode set, gas used: ${receipt5.gasUsed.toString()}`); + console.log(`L2 legacy shared bridge set in L1 Nullifier, gas used: ${receipt8.gasUsed.toString()}`); } } } + public async deployL2LegacySharedBridge(inputChainId: string, gasPrice: BigNumberish) { + if (this.verbose) { + console.log("Deploying L2 legacy shared bridge"); + } + await this.deploySharedBridgeImplOnL2ThroughL1(inputChainId, gasPrice); + await this.deploySharedBridgeProxyOnL2ThroughL1(inputChainId, gasPrice); + } + + public async deploySharedBridgeImplOnL2ThroughL1(chainId: string, gasPrice: BigNumberish) { + if (this.verbose) { + console.log("Deploying L2SharedBridge Implementation"); + } + const eraChainId = getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"); + + const l2SharedBridgeImplementationBytecode = L2_SHARED_BRIDGE_IMPLEMENTATION.bytecode; + // localLegacyBridgeTesting + // ? L2_DEV_SHARED_BRIDGE_IMPLEMENTATION.bytecode + // : L2_SHARED_BRIDGE_IMPLEMENTATION.bytecode; + if (!l2SharedBridgeImplementationBytecode) { + throw new Error("l2SharedBridgeImplementationBytecode not found"); + } + + if (this.verbose) { + console.log("l2SharedBridgeImplementationBytecode loaded"); + + console.log("Computing L2SharedBridge Implementation Address"); + } + + const l2SharedBridgeImplAddress = computeL2Create2Address( + this.deployWallet.address, + l2SharedBridgeImplementationBytecode, + "0x", + ethers.constants.HashZero + ); + this.addresses.Bridges.L2LegacySharedBridgeImplementation = l2SharedBridgeImplAddress; + + if (this.verbose) { + console.log(`L2SharedBridge Implementation Address: ${l2SharedBridgeImplAddress}`); + + console.log("Deploying L2SharedBridge Implementation"); + } + // TODO: request from API how many L2 gas needs for the transaction. 
+ const tx2 = await create2DeployFromL1( + chainId, + this.deployWallet, + l2SharedBridgeImplementationBytecode, + ethers.utils.defaultAbiCoder.encode(["uint256"], [eraChainId]), + ethers.constants.HashZero, + priorityTxMaxGasLimit, + gasPrice, + [L2_STANDARD_TOKEN_PROXY.bytecode], + this.addresses.Bridgehub.BridgehubProxy, + this.addresses.Bridges.SharedBridgeProxy + ); + await tx2.wait(); + + if (this.verbose) { + console.log("Deployed L2SharedBridge Implementation"); + console.log(`CONTRACTS_L2_LEGACY_SHARED_BRIDGE_IMPL_ADDR=${l2SharedBridgeImplAddress}`); + } + } + + public async deploySharedBridgeProxyOnL2ThroughL1(chainId: string, gasPrice: BigNumberish) { + const l1SharedBridge = this.defaultSharedBridge(this.deployWallet); + if (this.verbose) { + console.log("Deploying L2SharedBridge Proxy"); + } + const l2GovernorAddress = applyL1ToL2Alias(this.addresses.Governance); + + const l2SharedBridgeInterface = new Interface(L2_SHARED_BRIDGE_IMPLEMENTATION.abi); + const proxyInitializationParams = l2SharedBridgeInterface.encodeFunctionData("initialize", [ + l1SharedBridge.address, + hashL2Bytecode(L2_STANDARD_TOKEN_PROXY.bytecode), + l2GovernorAddress, + ]); + + /// prepare constructor data + const l2SharedBridgeProxyConstructorData = ethers.utils.arrayify( + new ethers.utils.AbiCoder().encode( + ["address", "address", "bytes"], + [this.addresses.Bridges.L2LegacySharedBridgeImplementation, l2GovernorAddress, proxyInitializationParams] + ) + ); + + /// compute L2SharedBridgeProxy address + const l2SharedBridgeProxyAddress = computeL2Create2Address( + this.deployWallet.address, + L2_SHARED_BRIDGE_PROXY.bytecode, + l2SharedBridgeProxyConstructorData, + ethers.constants.HashZero + ); + this.addresses.Bridges.L2LegacySharedBridgeProxy = l2SharedBridgeProxyAddress; + + /// deploy L2SharedBridgeProxy + // TODO: request from API how many L2 gas needs for the transaction. 
+ const tx3 = await create2DeployFromL1( + chainId, + this.deployWallet, + L2_SHARED_BRIDGE_PROXY.bytecode, + l2SharedBridgeProxyConstructorData, + ethers.constants.HashZero, + priorityTxMaxGasLimit, + gasPrice, + undefined, + this.addresses.Bridgehub.BridgehubProxy, + this.addresses.Bridges.SharedBridgeProxy + ); + await tx3.wait(); + if (this.verbose) { + console.log(`CONTRACTS_L2_LEGACY_SHARED_BRIDGE_ADDR=${l2SharedBridgeProxyAddress}`); + } + } + + public async executeChainAdminMulticall(calls: ChainAdminCall[], requireSuccess: boolean = true) { + const chainAdmin = ChainAdminFactory.connect(this.addresses.ChainAdmin, this.deployWallet); + + const totalValue = calls.reduce((acc, call) => acc.add(call.value), ethers.BigNumber.from(0)); + + const multicallTx = await chainAdmin.multicall(calls, requireSuccess, { value: totalValue }); + return await multicallTx.wait(); + } + public async setTokenMultiplierSetterAddress(tokenMultiplierSetterAddress: string) { const chainAdmin = ChainAdminFactory.connect(this.addresses.ChainAdmin, this.deployWallet); @@ -824,54 +1666,49 @@ export class Deployer { } public async transferAdminFromDeployerToChainAdmin() { - const stm = this.stateTransitionManagerContract(this.deployWallet); - const diamondProxyAddress = await stm.getHyperchain(this.chainId); - const hyperchain = IZkSyncHyperchainFactory.connect(diamondProxyAddress, this.deployWallet); + const ctm = this.chainTypeManagerContract(this.deployWallet); + const diamondProxyAddress = await ctm.getZKChain(this.chainId); + const zkChain = IZKChainFactory.connect(diamondProxyAddress, this.deployWallet); - const receipt = await (await hyperchain.setPendingAdmin(this.addresses.ChainAdmin)).wait(); + const receipt = await (await zkChain.setPendingAdmin(this.addresses.ChainAdmin)).wait(); if (this.verbose) { console.log(`ChainAdmin set as pending admin, gas used: ${receipt.gasUsed.toString()}`); } - const acceptAdminData = hyperchain.interface.encodeFunctionData("acceptAdmin"); - 
const chainAdmin = ChainAdminFactory.connect(this.addresses.ChainAdmin, this.deployWallet); - const multicallTx = await chainAdmin.multicall( - [ - { - target: hyperchain.address, - value: 0, - data: acceptAdminData, - }, - ], - true - ); - await multicallTx.wait(); + const acceptAdminData = zkChain.interface.encodeFunctionData("acceptAdmin"); + await this.executeChainAdminMulticall([ + { + target: zkChain.address, + value: 0, + data: acceptAdminData, + }, + ]); if (this.verbose) { console.log("Pending admin successfully accepted"); } } - public async registerToken(tokenAddress: string, useGovernance: boolean = false) { - const bridgehub = this.bridgehubContract(this.deployWallet); - - const receipt = await this.executeDirectOrGovernance(useGovernance, bridgehub, "addToken", [tokenAddress], 0); - - if (this.verbose) { - console.log(`Token ${tokenAddress} was registered, gas used: ${receipt.gasUsed.toString()}`); - } - } - public async deploySharedBridgeContracts(create2Salt: string, gasPrice?: BigNumberish, nonce?) { nonce = nonce ? 
parseInt(nonce) : await this.deployWallet.getTransactionCount(); + await this.deployL1NullifierImplementation(create2Salt, { gasPrice, nonce: nonce }); + await this.deployL1NullifierProxy(create2Salt, { gasPrice, nonce: nonce + 1 }); + + nonce = nonce + 2; await this.deploySharedBridgeImplementation(create2Salt, { gasPrice, nonce: nonce }); await this.deploySharedBridgeProxy(create2Salt, { gasPrice, nonce: nonce + 1 }); - await this.registerSharedBridge({ gasPrice, nonce: nonce + 2 }); + nonce = nonce + 2; + await this.deployBridgedStandardERC20Implementation(create2Salt, { gasPrice, nonce: nonce }); + await this.deployBridgedTokenBeacon(create2Salt, { gasPrice, nonce: nonce + 1 }); + await this.deployNativeTokenVaultImplementation(create2Salt, { gasPrice, nonce: nonce + 3 }); + await this.deployNativeTokenVaultProxy(create2Salt, { gasPrice }); + await this.deployCTMDeploymentTrackerImplementation(create2Salt, { gasPrice }); + await this.deployCTMDeploymentTrackerProxy(create2Salt, { gasPrice }); + await this.registerAddresses(); } public async deployValidatorTimelock(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const executionDelay = getNumberFromEnv("CONTRACTS_VALIDATOR_TIMELOCK_EXECUTION_DELAY"); const eraChainId = getNumberFromEnv("CONTRACTS_ERA_CHAIN_ID"); const contractAddress = await this.deployViaCreate2( @@ -887,20 +1724,19 @@ export class Deployer { this.addresses.ValidatorTimeLock = contractAddress; } - public async setStateTransitionManagerInValidatorTimelock(ethTxOptions: ethers.providers.TransactionRequest) { + public async setChainTypeManagerInValidatorTimelock(ethTxOptions: ethers.providers.TransactionRequest) { const validatorTimelock = this.validatorTimelock(this.deployWallet); - const tx = await validatorTimelock.setStateTransitionManager( + const tx = await validatorTimelock.setChainTypeManager( this.addresses.StateTransition.StateTransitionProxy, ethTxOptions ); const receipt 
= await tx.wait(); if (this.verbose) { - console.log(`StateTransitionManager was set in ValidatorTimelock, gas used: ${receipt.gasUsed.toString()}`); + console.log(`ChainTypeManager was set in ValidatorTimelock, gas used: ${receipt.gasUsed.toString()}`); } } public async deployMulticall3(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { - ethTxOptions.gasLimit ??= 10_000_000; const contractAddress = await this.deployViaCreate2("Multicall3", [], create2Salt, ethTxOptions); if (this.verbose) { @@ -908,11 +1744,59 @@ export class Deployer { } } + public async deployDAValidators(create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest) { + ethTxOptions.gasLimit ??= 10_000_000; + + // This address only makes sense on the L1, but we deploy it anyway to keep the script simple + const rollupValidatorBytecode = await this.loadFromDAFolder("RollupL1DAValidator"); + const rollupDAValidatorAddress = await this.deployViaCreate2( + "RollupL1DAValidator", + [], + create2Salt, + ethTxOptions, + undefined, + rollupValidatorBytecode + ); + if (this.verbose) { + console.log(`CONTRACTS_L1_ROLLUP_DA_VALIDATOR=${rollupDAValidatorAddress}`); + } + const validiumDAValidatorAddress = await this.deployViaCreate2( + "ValidiumL1DAValidator", + [], + create2Salt, + ethTxOptions, + undefined + ); + + if (this.verbose) { + console.log(`CONTRACTS_L1_VALIDIUM_DA_VALIDATOR=${validiumDAValidatorAddress}`); + } + // This address only makes sense on the Sync Layer, but we deploy it anyway to keep the script simple + const relayedSLDAValidator = await this.deployViaCreate2("RelayedSLDAValidator", [], create2Salt, ethTxOptions); + if (this.verbose) { + console.log(`CONTRACTS_L1_RELAYED_SL_DA_VALIDATOR=${relayedSLDAValidator}`); + } + this.addresses.RollupL1DAValidator = rollupDAValidatorAddress; + this.addresses.ValidiumL1DAValidator = validiumDAValidatorAddress; + this.addresses.RelayedSLDAValidator = relayedSLDAValidator; + } + + public async 
updateBlobVersionedHashRetrieverZkMode() { + if (!this.isZkMode()) { + throw new Error("`updateBlobVersionedHashRetrieverZk` should be only called when deploying on zkSync network"); + } + + console.log("BlobVersionedHashRetriever is not needed within zkSync network and won't be deployed"); + + // 0 is not allowed, we need to some random non-zero value. Let it be 0x1000000000000000000000000000000000000001 + console.log("CONTRACTS_BLOB_VERSIONED_HASH_RETRIEVER_ADDR=0x1000000000000000000000000000000000000001"); + this.addresses.BlobVersionedHashRetriever = "0x1000000000000000000000000000000000000001"; + } + public async deployBlobVersionedHashRetriever( create2Salt: string, ethTxOptions: ethers.providers.TransactionRequest ) { - ethTxOptions.gasLimit ??= 10_000_000; // solc contracts/zksync/utils/blobVersionedHashRetriever.yul --strict-assembly --bin const bytecode = "0x600b600b5f39600b5ff3fe5f358049805f5260205ff3"; @@ -939,18 +1823,15 @@ export class Deployer { } public bridgehubContract(signerOrProvider: Signer | providers.Provider) { - return IBridgehubFactory.connect(this.addresses.Bridgehub.BridgehubProxy, signerOrProvider); + return BridgehubFactory.connect(this.addresses.Bridgehub.BridgehubProxy, signerOrProvider); } - public stateTransitionManagerContract(signerOrProvider: Signer | providers.Provider) { - return IStateTransitionManagerFactory.connect( - this.addresses.StateTransition.StateTransitionProxy, - signerOrProvider - ); + public chainTypeManagerContract(signerOrProvider: Signer | providers.Provider) { + return ChainTypeManagerFactory.connect(this.addresses.StateTransition.StateTransitionProxy, signerOrProvider); } public stateTransitionContract(signerOrProvider: Signer | providers.Provider) { - return IZkSyncHyperchainFactory.connect(this.addresses.StateTransition.DiamondProxy, signerOrProvider); + return IZKChainFactory.connect(this.addresses.StateTransition.DiamondProxy, signerOrProvider); } public governanceContract(signerOrProvider: Signer | 
providers.Provider) { @@ -962,7 +1843,19 @@ export class Deployer { } public defaultSharedBridge(signerOrProvider: Signer | providers.Provider) { - return L1SharedBridgeFactory.connect(this.addresses.Bridges.SharedBridgeProxy, signerOrProvider); + return IL1AssetRouterFactory.connect(this.addresses.Bridges.SharedBridgeProxy, signerOrProvider); + } + + public l1NullifierContract(signerOrProvider: Signer | providers.Provider) { + return IL1NullifierFactory.connect(this.addresses.Bridges.L1NullifierProxy, signerOrProvider); + } + + public nativeTokenVault(signerOrProvider: Signer | providers.Provider) { + return IL1NativeTokenVaultFactory.connect(this.addresses.Bridges.NativeTokenVaultProxy, signerOrProvider); + } + + public ctmDeploymentTracker(signerOrProvider: Signer | providers.Provider) { + return ICTMDeploymentTrackerFactory.connect(this.addresses.Bridgehub.CTMDeploymentTrackerProxy, signerOrProvider); } public baseTokenContract(signerOrProvider: Signer | providers.Provider) { @@ -972,4 +1865,9 @@ export class Deployer { public proxyAdminContract(signerOrProvider: Signer | providers.Provider) { return ProxyAdminFactory.connect(this.addresses.TransparentProxyAdmin, signerOrProvider); } + + private async getL1ChainId(): Promise { + const l1ChainId = this.isZkMode() ? 
getNumberFromEnv("ETH_CLIENT_CHAIN_ID") : await this.deployWallet.getChainId(); + return +l1ChainId; + } } diff --git a/l1-contracts/src.ts/diamondCut.ts b/l1-contracts/src.ts/diamondCut.ts index f9eaadf0e..ca44029bf 100644 --- a/l1-contracts/src.ts/diamondCut.ts +++ b/l1-contracts/src.ts/diamondCut.ts @@ -3,11 +3,8 @@ import type { Interface } from "ethers/lib/utils"; import "@nomiclabs/hardhat-ethers"; import type { Wallet, BigNumberish } from "ethers"; import { ethers } from "ethers"; -import { IZkSyncHyperchainFactory } from "../typechain/IZkSyncHyperchainFactory"; -import { IZkSyncHyperchainBaseFactory } from "../typechain/IZkSyncHyperchainBaseFactory"; - -// Some of the facets are to be removed with the upcoming upgrade. -const UNCONDITIONALLY_REMOVED_FACETS = ["DiamondCutFacet", "GovernanceFacet"]; +import { IZKChainFactory } from "../typechain/IZKChainFactory"; +import { IZKChainBaseFactory } from "../typechain/IZKChainBaseFactory"; export enum Action { Add = 0, @@ -101,12 +98,12 @@ export async function getCurrentFacetCutsForAdd( } export async function getDeployedFacetCutsForRemove(wallet: Wallet, zkSyncAddress: string, updatedFaceNames: string[]) { - const mainContract = IZkSyncHyperchainFactory.connect(zkSyncAddress, wallet); + const mainContract = IZKChainFactory.connect(zkSyncAddress, wallet); const diamondCutFacets = await mainContract.facets(); // We don't care about freezing, because we are removing the facets. 
const result = []; for (const { addr, selectors } of diamondCutFacets) { - const facet = IZkSyncHyperchainBaseFactory.connect(addr, wallet); + const facet = IZKChainBaseFactory.connect(addr, wallet); const facetName = await facet.getName(); if (updatedFaceNames.includes(facetName)) { result.push({ @@ -131,10 +128,7 @@ export async function getFacetCutsForUpgrade( namesOfFacetsToBeRemoved?: string[] ) { const newFacetCuts = await getCurrentFacetCutsForAdd(adminAddress, gettersAddress, mailboxAddress, executorAddress); - namesOfFacetsToBeRemoved = namesOfFacetsToBeRemoved || [ - ...UNCONDITIONALLY_REMOVED_FACETS, - ...Object.keys(newFacetCuts), - ]; + namesOfFacetsToBeRemoved = namesOfFacetsToBeRemoved || [...Object.keys(newFacetCuts)]; const oldFacetCuts = await getDeployedFacetCutsForRemove(wallet, zkSyncAddress, namesOfFacetsToBeRemoved); return [...oldFacetCuts, ...Object.values(newFacetCuts)]; } diff --git a/l1-contracts/src.ts/utils.ts b/l1-contracts/src.ts/utils.ts index ca18bc7e4..190a5743c 100644 --- a/l1-contracts/src.ts/utils.ts +++ b/l1-contracts/src.ts/utils.ts @@ -9,7 +9,8 @@ import * as path from "path"; import { DiamondInitFactory } from "../typechain"; import type { DiamondCut, FacetCut } from "./diamondCut"; import { diamondCut } from "./diamondCut"; -import { SYSTEM_CONFIG } from "../scripts/utils"; +import { SYSTEM_CONFIG, web3Url } from "../scripts/utils"; +import { Wallet as ZkWallet, Provider } from "zksync-ethers"; export const testConfigPath = process.env.ZKSYNC_ENV ? 
path.join(process.env.ZKSYNC_HOME as string, "etc/test_config/constant") @@ -21,13 +22,33 @@ export const REQUIRED_L2_GAS_PRICE_PER_PUBDATA = require("../../SystemConfig.jso export const SYSTEM_UPGRADE_L2_TX_TYPE = 254; export const ADDRESS_ONE = "0x0000000000000000000000000000000000000001"; +export const ETH_ADDRESS_IN_CONTRACTS = ADDRESS_ONE; export const L1_TO_L2_ALIAS_OFFSET = "0x1111000000000000000000000000000000001111"; +export const L2_BRIDGEHUB_ADDRESS = "0x0000000000000000000000000000000000010002"; +export const L2_ASSET_ROUTER_ADDRESS = "0x0000000000000000000000000000000000010003"; +export const L2_NATIVE_TOKEN_VAULT_ADDRESS = "0x0000000000000000000000000000000000010004"; +export const L2_MESSAGE_ROOT_ADDRESS = "0x0000000000000000000000000000000000010005"; +export const DEPLOYER_SYSTEM_CONTRACT_ADDRESS = "0x0000000000000000000000000000000000008006"; export const EMPTY_STRING_KECCAK = "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"; const CREATE2_PREFIX = ethers.utils.solidityKeccak256(["string"], ["zksyncCreate2"]); +export const priorityTxMaxGasLimit = getNumberFromEnv("CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT"); + const ADDRESS_MODULO = ethers.BigNumber.from(2).pow(160); +export const STORED_BATCH_INFO_ABI_STRING = + "tuple(uint64 batchNumber, bytes32 batchHash, uint64 indexRepeatedStorageChanges, uint256 numberOfLayer1Txs, bytes32 priorityOperationsHash, bytes32 l2LogsTreeRoot, uint256 timestamp, bytes32 commitment)"; +export const COMMIT_BATCH_INFO_ABI_STRING = + "tuple(uint64 batchNumber, uint64 timestamp, uint64 indexRepeatedStorageChanges, bytes32 newStateRoot, uint256 numberOfLayer1Txs, bytes32 priorityOperationsHash, bytes32 bootloaderHeapInitialContentsHash, bytes32 eventsQueueStateHash, bytes systemLogs, bytes operatorDAInput)"; +export const PRIORITY_OPS_BATCH_INFO_ABI_STRING = + "tuple(bytes32[] leftPath, bytes32[] rightPath, bytes32[] itemHashes)"; export const DIAMOND_CUT_DATA_ABI_STRING = "tuple(tuple(address facet, uint8 
action, bool isFreezable, bytes4[] selectors)[] facetCuts, address initAddress, bytes initCalldata)"; +export const FORCE_DEPLOYMENT_ABI_STRING = + "tuple(bytes32 bytecodeHash, address newAddress, bool callConstructor, uint256 value, bytes input)[]"; +export const BRIDGEHUB_CTM_ASSET_DATA_ABI_STRING = "tuple(uint256 chainId, bytes ctmData, bytes chainData)"; +export const FIXED_FORCE_DEPLOYMENTS_DATA_ABI_STRING = + "tuple(uint256 l1ChainId, uint256 eraChainId, address l1AssetRouter, bytes32 l2TokenProxyBytecodeHash, address aliasedL1Governance, uint256 maxNumberOfZKChains, bytes32 bridgehubBytecodeHash, bytes32 l2AssetRouterBytecodeHash, bytes32 l2NtvBytecodeHash, bytes32 messageRootBytecodeHash, address l2SharedBridgeLegacyImpl, address l2BridgedStandardERC20Impl, address l2BridgeProxyOwnerAddress, address l2BridgedStandardERC20ProxyOwnerAddress)"; +export const ADDITIONAL_FORCE_DEPLOYMENTS_DATA_ABI_STRING = "tuple(bytes32 baseTokenAssetId, address l2Weth)"; export function applyL1ToL2Alias(address: string): string { return ethers.utils.hexlify(ethers.BigNumber.from(address).add(L1_TO_L2_ALIAS_OFFSET).mod(ADDRESS_MODULO)); @@ -42,6 +63,10 @@ export function readInterface(path: string, fileName: string) { return new ethers.utils.Interface(abi); } +export function readContract(path: string, fileName: string) { + return JSON.parse(fs.readFileSync(`${path}/${fileName}.sol/${fileName}.json`, { encoding: "utf-8" })); +} + export function hashL2Bytecode(bytecode: ethers.BytesLike): Uint8Array { // For getting the consistent length we first convert the bytecode to UInt8Array const bytecodeAsArray = ethers.utils.arrayify(bytecode); @@ -92,6 +117,15 @@ export function computeL2Create2Address( return ethers.utils.hexDataSlice(data, 12); } +export function encodeNTVAssetId(chainId: number, tokenAddress: BytesLike) { + return ethers.utils.keccak256( + ethers.utils.defaultAbiCoder.encode( + ["uint256", "address", "bytes32"], + [chainId, L2_NATIVE_TOKEN_VAULT_ADDRESS, 
ethers.utils.hexZeroPad(tokenAddress, 32)] + ) + ); +} + export function getAddressFromEnv(envName: string): string { const address = process.env[envName]; if (!/^0x[a-fA-F0-9]{40}$/.test(address)) { @@ -179,6 +213,20 @@ export interface L2CanonicalTransaction { reservedDynamic: BytesLike; } +export function ethersWalletToZkWallet(wallet: ethers.Wallet): ZkWallet { + return new ZkWallet(wallet.privateKey, new Provider(web3Url())); +} + +export function isZKMode(): boolean { + return process.env.CONTRACTS_BASE_NETWORK_ZKSYNC === "true"; +} + +const LOCAL_NETWORKS = ["localhost", "hardhat", "localhostL2"]; + +export function isCurrentNetworkLocal(): boolean { + return LOCAL_NETWORKS.includes(process.env.CHAIN_ETH_NETWORK); +} + // Checks that the initial cut hash params are valid. // Sometimes it makes sense to allow dummy values for testing purposes, but in production // these values should be set correctly. @@ -268,12 +316,11 @@ export function compileInitialCutHash( { chainId: "0x0000000000000000000000000000000000000000000000000000000000000001", bridgehub: "0x0000000000000000000000000000000000001234", - stateTransitionManager: "0x0000000000000000000000000000000000002234", + chainTypeManager: "0x0000000000000000000000000000000000002234", protocolVersion: "0x0000000000000000000000000000000000002234", admin: "0x0000000000000000000000000000000000003234", validatorTimelock: "0x0000000000000000000000000000000000004234", - baseToken: "0x0000000000000000000000000000000000004234", - baseTokenBridge: "0x0000000000000000000000000000000000004234", + baseTokenAssetId: "0x0000000000000000000000000000000000000000000000000000000000004234", storedBatchZero: "0x0000000000000000000000000000000000000000000000000000000000005432", verifier, verifierParams, @@ -285,5 +332,16 @@ export function compileInitialCutHash( }, ]); - return diamondCut(facetCuts, diamondInit, "0x" + diamondInitCalldata.slice(2 + (4 + 9 * 32) * 2)); + return diamondCut(facetCuts, diamondInit, "0x" + 
diamondInitCalldata.slice(2 + (4 + 8 * 32) * 2)); +} + +export enum PubdataSource { + Rollup, + Validium, +} + +export interface ChainAdminCall { + target: string; + value: BigNumberish; + data: BytesLike; } diff --git a/l1-contracts/test/foundry/l1/integration/AssetRouterTest.t.sol b/l1-contracts/test/foundry/l1/integration/AssetRouterTest.t.sol new file mode 100644 index 000000000..7d122638d --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/AssetRouterTest.t.sol @@ -0,0 +1,192 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {Vm} from "forge-std/Vm.sol"; +import {console2 as console} from "forge-std/console2.sol"; + +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; +import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {L1ContractDeployer} from "./_SharedL1ContractDeployer.t.sol"; +import {TokenDeployer} from "./_SharedTokenDeployer.t.sol"; +import {ZKChainDeployer} from "./_SharedZKChainDeployer.t.sol"; +import {L2TxMocker} from "./_SharedL2TxMocker.t.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK} from "contracts/common/Config.sol"; +import {L2CanonicalTransaction, L2Message} from "contracts/common/Messaging.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IL1ERC20Bridge} from 
"contracts/bridge/interfaces/IL1ERC20Bridge.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {IL1Nullifier, FinalizeL1DepositParams} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IAssetRouterBase, LEGACY_ENCODING_VERSION, NEW_ENCODING_VERSION} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {BridgeHelper} from "contracts/bridge/BridgeHelper.sol"; +import {BridgedStandardERC20, NonSequentialVersion} from "contracts/bridge/BridgedStandardERC20.sol"; +import {IBridgedStandardToken} from "contracts/bridge/BridgedStandardERC20.sol"; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; + +contract AssetRouterTest is L1ContractDeployer, ZKChainDeployer, TokenDeployer, L2TxMocker { + uint256 constant TEST_USERS_COUNT = 10; + address[] public users; + address[] public l2ContractAddresses; + bytes32 public l2TokenAssetId; + address public tokenL1Address; + // generate MAX_USERS addresses and append it to users array + function _generateUserAddresses() internal { + require(users.length == 0, "Addresses already generated"); + + for (uint256 i = 0; i < TEST_USERS_COUNT; i++) { + address newAddress = makeAddr(string(abi.encode("account", i))); + users.push(newAddress); + } + } + + function prepare() public { + _generateUserAddresses(); + + _deployL1Contracts(); + _deployTokens(); + 
_registerNewTokens(tokens); + + _deployEra(); + // _deployHyperchain(ETH_TOKEN_ADDRESS); + // _deployHyperchain(ETH_TOKEN_ADDRESS); + // _deployHyperchain(tokens[0]); + // _deployHyperchain(tokens[0]); + // _deployHyperchain(tokens[1]); + // _deployHyperchain(tokens[1]); + + for (uint256 i = 0; i < zkChainIds.length; i++) { + address contractAddress = makeAddr(string(abi.encode("contract", i))); + l2ContractAddresses.push(contractAddress); + + _addL2ChainContract(zkChainIds[i], contractAddress); + } + } + + function setUp() public { + prepare(); + } + + function depositToL1(address _tokenAddress) public { + vm.mockCall( + address(bridgehub), + abi.encodeWithSelector(IBridgehub.proveL2MessageInclusion.selector), + abi.encode(true) + ); + uint256 chainId = eraZKChainId; + l2TokenAssetId = DataEncoding.encodeNTVAssetId(chainId, _tokenAddress); + bytes memory transferData = DataEncoding.encodeBridgeMintData({ + _originalCaller: ETH_TOKEN_ADDRESS, + _l2Receiver: address(this), + _l1Token: ETH_TOKEN_ADDRESS, + _amount: 100, + _erc20Metadata: BridgeHelper.getERC20Getters(_tokenAddress, chainId) + }); + l1Nullifier.finalizeDeposit( + FinalizeL1DepositParams({ + chainId: chainId, + l2BatchNumber: 1, + l2MessageIndex: 1, + l2Sender: L2_ASSET_ROUTER_ADDR, + l2TxNumberInBatch: 1, + message: abi.encodePacked( + IAssetRouterBase.finalizeDeposit.selector, + chainId, + l2TokenAssetId, + transferData + ), + merkleProof: new bytes32[](0) + }) + ); + tokenL1Address = l1NativeTokenVault.tokenAddress(l2TokenAssetId); + } + + function test_DepositToL1_Success() public { + depositToL1(ETH_TOKEN_ADDRESS); + } + + function test_BridgeTokenFunctions() public { + depositToL1(ETH_TOKEN_ADDRESS); + BridgedStandardERC20 bridgedToken = BridgedStandardERC20(l1NativeTokenVault.tokenAddress(l2TokenAssetId)); + assertEq(bridgedToken.name(), "Ether"); + assertEq(bridgedToken.symbol(), "ETH"); + assertEq(bridgedToken.decimals(), 18); + } + + function test_reinitBridgedToken_Success() public { + 
depositToL1(ETH_TOKEN_ADDRESS); + BridgedStandardERC20 bridgedToken = BridgedStandardERC20(l1NativeTokenVault.tokenAddress(l2TokenAssetId)); + address owner = l1NativeTokenVault.owner(); + vm.broadcast(owner); + bridgedToken.reinitializeToken( + BridgedStandardERC20.ERC20Getters({ignoreName: false, ignoreSymbol: false, ignoreDecimals: false}), + "TestnetERC20Token", + "TST", + 2 + ); + } + + function test_reinitBridgedToken_WrongVersion() public { + depositToL1(ETH_TOKEN_ADDRESS); + BridgedStandardERC20 bridgedToken = BridgedStandardERC20(l1NativeTokenVault.tokenAddress(l2TokenAssetId)); + vm.expectRevert(NonSequentialVersion.selector); + bridgedToken.reinitializeToken( + BridgedStandardERC20.ERC20Getters({ignoreName: false, ignoreSymbol: false, ignoreDecimals: false}), + "TestnetERC20Token", + "TST", + 3 + ); + } + + /// @dev We should not test this on the L1, but to get coverage we do. + function test_BridgeTokenBurn() public { + depositToL1(ETH_TOKEN_ADDRESS); + BridgedStandardERC20 bridgedToken = BridgedStandardERC20(l1NativeTokenVault.tokenAddress(l2TokenAssetId)); + // setting nativeTokenVault to zero address. 
+ vm.store(address(bridgedToken), bytes32(uint256(207)), bytes32(0)); + vm.mockCall( + address(L2_NATIVE_TOKEN_VAULT_ADDR), + abi.encodeWithSelector(INativeTokenVault.L1_CHAIN_ID.selector), + abi.encode(block.chainid) + ); + vm.broadcast(L2_NATIVE_TOKEN_VAULT_ADDR); // kl todo call ntv, or even assetRouter/bridgehub + bridgedToken.bridgeBurn(address(this), 100); + } + + function test_DepositToL1AndWithdraw() public { + depositToL1(ETH_TOKEN_ADDRESS); + bytes memory secondBridgeCalldata = bytes.concat( + NEW_ENCODING_VERSION, + abi.encode(l2TokenAssetId, abi.encode(uint256(100), address(this))) + ); + IERC20(tokenL1Address).approve(address(l1NativeTokenVault), 100); + bridgehub.requestL2TransactionTwoBridges{value: 250000000000100}( + L2TransactionRequestTwoBridgesOuter({ + chainId: eraZKChainId, + mintValue: 250000000000100, + l2Value: 0, + l2GasLimit: 1000000, + l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + refundRecipient: address(0), + secondBridgeAddress: address(sharedBridge), + secondBridgeValue: 0, + secondBridgeCalldata: secondBridgeCalldata + }) + ); + } + + // add this to be excluded from coverage report + function test() internal override {} +} diff --git a/l1-contracts/test/foundry/l1/integration/BridgeHubInvariantTests.t.sol b/l1-contracts/test/foundry/l1/integration/BridgeHubInvariantTests.t.sol new file mode 100644 index 000000000..b4fec78de --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/BridgeHubInvariantTests.t.sol @@ -0,0 +1,741 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {Vm} from "forge-std/Vm.sol"; + +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from 
"contracts/state-transition/chain-deps/facets/Getters.sol"; +import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; +import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {L1ContractDeployer} from "./_SharedL1ContractDeployer.t.sol"; +import {TokenDeployer} from "./_SharedTokenDeployer.t.sol"; +import {ZKChainDeployer} from "./_SharedZKChainDeployer.t.sol"; +import {L2TxMocker} from "./_SharedL2TxMocker.t.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK} from "contracts/common/Config.sol"; +import {L2CanonicalTransaction} from "contracts/common/Messaging.sol"; +import {L2Message} from "contracts/common/Messaging.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; + +contract BridgeHubInvariantTests is L1ContractDeployer, ZKChainDeployer, TokenDeployer, L2TxMocker { + uint256 constant TEST_USERS_COUNT = 10; + + bytes32 constant NEW_PRIORITY_REQUEST_HASH = + keccak256( + "NewPriorityRequest(uint256,bytes32,uint64,(uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256[4],bytes,bytes,uint256[],bytes,bytes),bytes[])" + ); + + enum RequestType { + DIRECT, + TWO_BRIDGES + } + + struct NewPriorityRequest { + uint256 txId; + bytes32 txHash; + uint64 expirationTimestamp; + L2CanonicalTransaction transaction; + bytes[] factoryDeps; + } + + address[] public users; + address[] public l2ContractAddresses; + address[] public addressesToExclude; + address public currentUser; + uint256 public currentChainId; + address public currentChainAddress; + address public currentTokenAddress = ETH_TOKEN_ADDRESS; + 
TestnetERC20Token currentToken; + + // Amounts deposited by each user, mapped by user address and token address + mapping(address user => mapping(address token => uint256 deposited)) public depositsUsers; + // Amounts deposited into the bridge, mapped by ZK chain address and token address + mapping(address chain => mapping(address token => uint256 deposited)) public depositsBridge; + // Total sum of deposits into the bridge, mapped by token address + mapping(address token => uint256 deposited) public tokenSumDeposit; + // Total sum of withdrawn tokens, mapped by token address + mapping(address token => uint256 deposited) public tokenSumWithdrawal; + // Total sum of L2 values transferred to mock contracts, mapped by token address + mapping(address token => uint256 deposited) public l2ValuesSum; + // Deposits into the ZK chains contract, mapped by L2 contract address and token address + mapping(address l2contract => mapping(address token => uint256 balance)) public contractDeposits; + // Total sum of deposits into all L2 contracts, mapped by token address + mapping(address token => uint256 deposited) public contractDepositsSum; + + // gets random user from users array, set contract variables + modifier useUser(uint256 userIndexSeed) { + currentUser = users[bound(userIndexSeed, 0, users.length - 1)]; + vm.startPrank(currentUser); + _; + vm.stopPrank(); + } + + // gets random ZK chain from ZK chain ids, set contract variables + modifier useZKChain(uint256 chainIndexSeed) { + currentChainId = zkChainIds[bound(chainIndexSeed, 0, zkChainIds.length - 1)]; + currentChainAddress = getZKChainAddress(currentChainId); + _; + } + + // use token specified by address, set contract variables + modifier useGivenToken(address tokenAddress) { + currentToken = TestnetERC20Token(tokenAddress); + currentTokenAddress = tokenAddress; + _; + } + + // use random token from tokens array, set contract variables + modifier useRandomToken(uint256 tokenIndexSeed) { + currentTokenAddress = 
tokens[bound(tokenIndexSeed, 0, tokens.length - 1)]; + currentToken = TestnetERC20Token(currentTokenAddress); + _; + } + + // use base token as main token + // watch out, do not use with ETH + modifier useBaseToken() { + currentToken = TestnetERC20Token(getZKChainBaseToken(currentChainId)); + currentTokenAddress = address(currentToken); + _; + } + + // use ERC token by getting randomly token + // it keeps iterating while the token is ETH + modifier useERC20Token(uint256 tokenIndexSeed) { + currentTokenAddress = tokens[bound(tokenIndexSeed, 0, tokens.length - 1)]; + + while (currentTokenAddress == ETH_TOKEN_ADDRESS) { + tokenIndexSeed += 1; + currentTokenAddress = tokens[bound(tokenIndexSeed, 0, tokens.length - 1)]; + } + + currentToken = TestnetERC20Token(currentTokenAddress); + + _; + } + + // generate MAX_USERS addresses and append it to users array + function _generateUserAddresses() internal { + require(users.length == 0, "Addresses already generated"); + + for (uint256 i = 0; i < TEST_USERS_COUNT; i++) { + address newAddress = makeAddr(string(abi.encode("account", i))); + users.push(newAddress); + } + } + + // TODO: consider what should be actually committed, do we need to simulate operator: + // blocks -> batches -> commits or just mock it. 
+ function _commitBatchInfo(uint256 _chainId) internal { + //vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1 + 1); + + GettersFacet zkChainGetters = GettersFacet(getZKChainAddress(_chainId)); + + IExecutor.StoredBatchInfo memory batchZero; + + batchZero.batchNumber = 0; + batchZero.timestamp = 0; + batchZero.numberOfLayer1Txs = 0; + batchZero.priorityOperationsHash = EMPTY_STRING_KECCAK; + batchZero.l2LogsTreeRoot = DEFAULT_L2_LOGS_TREE_ROOT_HASH; + batchZero.batchHash = vm.parseBytes32("0x0000000000000000000000000000000000000000000000000000000000000000"); //genesis root hash + batchZero.indexRepeatedStorageChanges = uint64(0); + batchZero.commitment = vm.parseBytes32("0x0000000000000000000000000000000000000000000000000000000000000000"); + + bytes32 hashedZeroBatch = keccak256(abi.encode(batchZero)); + assertEq(zkChainGetters.storedBatchHash(0), hashedZeroBatch); + } + + // use mailbox interface to return exact amount to use as a gas on l2 side, + // prevents from failing if mintValue < l2Value + required gas + function _getMinRequiredGasPriceForChain( + uint256 _chainId, + uint256 _gasPrice, + uint256 _l2GasLimit, + uint256 _l2GasPerPubdataByteLimit + ) public view returns (uint256) { + MailboxFacet chainMailBox = MailboxFacet(getZKChainAddress(_chainId)); + + return chainMailBox.l2TransactionBaseCost(_gasPrice, _l2GasLimit, _l2GasPerPubdataByteLimit); + } + + // decodes data encoded with encodeCall, this is just to decode information received from logs + // to deposit into mock l2 contract + function _getDecodedDepositL2Calldata( + bytes memory callData + ) internal view returns (address l1Sender, address l2Receiver, address l1Token, uint256 amount, bytes memory b) { + // UnsafeBytes approach doesn't work, because abi is not deterministic + bytes memory slicedData = new bytes(callData.length - 4); + + for (uint256 i = 4; i < callData.length; i++) { + slicedData[i - 4] = callData[i]; + } + + (l1Sender, l2Receiver, l1Token, amount, b) = abi.decode( + slicedData, + (address, 
address, address, uint256, bytes) + ); + } + + // handle event emitted from logs, just to ensure proper decoding to set mock contract balance + function _handleRequestByMockL2Contract(NewPriorityRequest memory request, RequestType requestType) internal { + address contractAddress = address(uint160(uint256(request.transaction.to))); + + address tokenAddress; + address receiver; + uint256 toSend; + address l1Sender; + uint256 balanceAfter; + bytes memory temp; + + if (requestType == RequestType.TWO_BRIDGES) { + (l1Sender, receiver, tokenAddress, toSend, temp) = _getDecodedDepositL2Calldata(request.transaction.data); + } else { + (tokenAddress, toSend, receiver) = abi.decode(request.transaction.data, (address, uint256, address)); + } + + assertEq(contractAddress, receiver); + + if (tokenAddress == ETH_TOKEN_ADDRESS) { + uint256 balanceBefore = contractAddress.balance; + vm.deal(contractAddress, toSend + balanceBefore); + + balanceAfter = contractAddress.balance; + } else { + TestnetERC20Token token = TestnetERC20Token(tokenAddress); + token.mint(contractAddress, toSend); + + balanceAfter = token.balanceOf(contractAddress); + } + + contractDeposits[contractAddress][tokenAddress] += toSend; + contractDepositsSum[tokenAddress] += toSend; + assertEq(balanceAfter, contractDeposits[contractAddress][tokenAddress]); + } + + // gets event from logs + function _getNewPriorityQueueFromLogs(Vm.Log[] memory logs) internal returns (NewPriorityRequest memory request) { + for (uint256 i = 0; i < logs.length; i++) { + Vm.Log memory log = logs[i]; + + if (log.topics[0] == NEW_PRIORITY_REQUEST_HASH) { + ( + request.txId, + request.txHash, + request.expirationTimestamp, + request.transaction, + request.factoryDeps + ) = abi.decode(log.data, (uint256, bytes32, uint64, L2CanonicalTransaction, bytes[])); + } + } + } + + // deposits ERC20 token to the ZK chain where base token is ETH + // this function use requestL2TransactionTwoBridges function from shared bridge. 
+ // tokenAddress should be any ERC20 token, excluding ETH + function depositERC20ToEthChain(uint256 l2Value, address tokenAddress) private useGivenToken(tokenAddress) { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = minRequiredGas; + vm.deal(currentUser, mintValue); + + currentToken.mint(currentUser, l2Value); + currentToken.approve(address(sharedBridge), l2Value); + + bytes memory secondBridgeCallData = abi.encode(currentTokenAddress, l2Value, chainContracts[currentChainId]); + L2TransactionRequestTwoBridgesOuter memory requestTx = _createL2TransactionRequestTwoBridges({ + _chainId: currentChainId, + _mintValue: mintValue, + _secondBridgeValue: 0, + _secondBridgeAddress: address(sharedBridge), + _l2Value: 0, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _secondBridgeCalldata: secondBridgeCallData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionTwoBridges{value: mintValue}(requestTx); + Vm.Log[] memory logs = vm.getRecordedLogs(); + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.TWO_BRIDGES); + + depositsUsers[currentUser][ETH_TOKEN_ADDRESS] += mintValue; + depositsBridge[currentChainAddress][ETH_TOKEN_ADDRESS] += mintValue; + tokenSumDeposit[ETH_TOKEN_ADDRESS] += mintValue; + + depositsUsers[currentUser][currentTokenAddress] += l2Value; + depositsBridge[currentChainAddress][currentTokenAddress] += l2Value; + tokenSumDeposit[currentTokenAddress] += l2Value; + l2ValuesSum[currentTokenAddress] += l2Value; + } + + // deposits ETH token to chain where base token is some ERC20 + // modifier prevents you from using 
some other token as base + function depositEthToERC20Chain(uint256 l2Value) private useBaseToken { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + vm.deal(currentUser, l2Value); + uint256 mintValue = minRequiredGas; + currentToken.mint(currentUser, mintValue); + currentToken.approve(address(sharedBridge), mintValue); + + bytes memory secondBridgeCallData = abi.encode(ETH_TOKEN_ADDRESS, uint256(0), chainContracts[currentChainId]); + L2TransactionRequestTwoBridgesOuter memory requestTx = _createL2TransactionRequestTwoBridges({ + _chainId: currentChainId, + _mintValue: mintValue, + _secondBridgeValue: l2Value, + _secondBridgeAddress: address(sharedBridge), + _l2Value: 0, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _secondBridgeCalldata: secondBridgeCallData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionTwoBridges{value: l2Value}(requestTx); + Vm.Log[] memory logs = vm.getRecordedLogs(); + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.TWO_BRIDGES); + + depositsUsers[currentUser][ETH_TOKEN_ADDRESS] += l2Value; + depositsBridge[currentChainAddress][ETH_TOKEN_ADDRESS] += l2Value; + tokenSumDeposit[ETH_TOKEN_ADDRESS] += l2Value; + l2ValuesSum[ETH_TOKEN_ADDRESS] += l2Value; + + depositsUsers[currentUser][currentTokenAddress] += mintValue; + depositsBridge[currentChainAddress][currentTokenAddress] += mintValue; + tokenSumDeposit[currentTokenAddress] += mintValue; + } + + // deposits ERC20 to token with base being also ERC20 + // there are no modifiers so watch out, baseTokenAddress should be base of ZK chain + // currentToken should be 
different from base + function depositERC20ToERC20Chain(uint256 l2Value, address baseTokenAddress) private { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = minRequiredGas; + + TestnetERC20Token baseToken = TestnetERC20Token(baseTokenAddress); + baseToken.mint(currentUser, mintValue); + baseToken.approve(address(sharedBridge), mintValue); + + currentToken.mint(currentUser, l2Value); + currentToken.approve(address(sharedBridge), l2Value); + + bytes memory secondBridgeCallData = abi.encode(currentTokenAddress, l2Value, chainContracts[currentChainId]); + L2TransactionRequestTwoBridgesOuter memory requestTx = _createL2TransactionRequestTwoBridges({ + _chainId: currentChainId, + _mintValue: mintValue, + _secondBridgeValue: 0, + _secondBridgeAddress: address(sharedBridge), + _l2Value: 0, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _secondBridgeCalldata: secondBridgeCallData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionTwoBridges(requestTx); + Vm.Log[] memory logs = vm.getRecordedLogs(); + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.TWO_BRIDGES); + + depositsUsers[currentUser][baseTokenAddress] += mintValue; + depositsBridge[currentChainAddress][baseTokenAddress] += mintValue; + tokenSumDeposit[baseTokenAddress] += mintValue; + + depositsUsers[currentUser][currentTokenAddress] += l2Value; + depositsBridge[currentChainAddress][currentTokenAddress] += l2Value; + tokenSumDeposit[currentTokenAddress] += l2Value; + l2ValuesSum[currentTokenAddress] += l2Value; + } + + // deposits ETH to ZK chain where base is ETH + 
function depositEthBase(uint256 l2Value) private { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; // reverts with 8 + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = l2Value + minRequiredGas; + vm.deal(currentUser, mintValue); + + bytes memory callData = abi.encode(currentTokenAddress, l2Value, chainContracts[currentChainId]); + L2TransactionRequestDirect memory txRequest = _createL2TransactionRequestDirect({ + _chainId: currentChainId, + _mintValue: mintValue, + _l2Value: l2Value, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _l2CallData: callData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionDirect{value: mintValue}(txRequest); + Vm.Log[] memory logs = vm.getRecordedLogs(); + + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.DIRECT); + + depositsUsers[currentUser][ETH_TOKEN_ADDRESS] += mintValue; + depositsBridge[currentChainAddress][ETH_TOKEN_ADDRESS] += mintValue; + tokenSumDeposit[ETH_TOKEN_ADDRESS] += mintValue; + l2ValuesSum[ETH_TOKEN_ADDRESS] += l2Value; + } + + // deposits base ERC20 token to the bridge + function depositERC20Base(uint256 l2Value) private useBaseToken { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + vm.deal(currentUser, gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = l2Value + minRequiredGas; + currentToken.mint(currentUser, mintValue); + currentToken.approve(address(sharedBridge), mintValue); + + bytes memory callData = abi.encode(currentTokenAddress, 
l2Value, chainContracts[currentChainId]); + L2TransactionRequestDirect memory txRequest = _createL2TransactionRequestDirect({ + _chainId: currentChainId, + _mintValue: mintValue, + _l2Value: l2Value, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _l2CallData: callData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionDirect(txRequest); + Vm.Log[] memory logs = vm.getRecordedLogs(); + + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.DIRECT); + + depositsUsers[currentUser][currentTokenAddress] += mintValue; + depositsBridge[currentChainAddress][currentTokenAddress] += mintValue; + tokenSumDeposit[currentTokenAddress] += mintValue; + l2ValuesSum[currentTokenAddress] += l2Value; + } + + function withdrawERC20Token(uint256 amountToWithdraw, address tokenAddress) private useGivenToken(tokenAddress) { + uint256 l2BatchNumber = uint256(uint160(makeAddr("l2BatchNumber"))); + uint256 l2MessageIndex = uint256(uint160(makeAddr("l2MessageIndex"))); + uint16 l2TxNumberInBatch = uint16(uint160(makeAddr("l2TxNumberInBatch"))); + bytes32[] memory merkleProof = new bytes32[](1); + + _setSharedBridgeIsWithdrawalFinalized(currentChainId, l2BatchNumber, l2MessageIndex, false); + uint256 beforeChainBalance = l1Nullifier.chainBalance(currentChainId, currentTokenAddress); + uint256 beforeBalance = currentToken.balanceOf(address(sharedBridge)); + + if (beforeChainBalance < amountToWithdraw) { + vm.expectRevert("L1AR: not enough funds 2"); + } else { + tokenSumWithdrawal[currentTokenAddress] += amountToWithdraw; + } + + bytes memory message = abi.encodePacked( + IL1ERC20Bridge.finalizeWithdrawal.selector, + currentUser, + currentTokenAddress, + amountToWithdraw + ); + + L2Message memory l2ToL1Message = L2Message({ + txNumberInBatch: 
l2TxNumberInBatch, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + data: message + }); + + vm.mockCall( + bridgehubProxyAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL2MessageInclusion.selector, + currentChainId, + l2BatchNumber, + l2MessageIndex, + l2ToL1Message, + merkleProof + ), + abi.encode(true) + ); + + sharedBridge.finalizeWithdrawal({ + _chainId: currentChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + + // check if the balance was updated correctly + if (beforeChainBalance > amountToWithdraw) { + assertEq( + beforeChainBalance - l1Nullifier.chainBalance(currentChainId, currentTokenAddress), + amountToWithdraw + ); + assertEq(beforeBalance - currentToken.balanceOf(address(sharedBridge)), amountToWithdraw); + } + } + + function withdrawETHToken(uint256 amountToWithdraw, address tokenAddress) private useGivenToken(tokenAddress) { + uint256 l2BatchNumber = uint256(uint160(makeAddr("l2BatchNumber"))); + uint256 l2MessageIndex = uint256(uint160(makeAddr("l2MessageIndex"))); + uint16 l2TxNumberInBatch = uint16(uint160(makeAddr("l2TxNumberInBatch"))); + bytes32[] memory merkleProof = new bytes32[](1); + + _setSharedBridgeIsWithdrawalFinalized(currentChainId, l2BatchNumber, l2MessageIndex, false); + uint256 beforeChainBalance = l1Nullifier.chainBalance(currentChainId, currentTokenAddress); + uint256 beforeBalance = address(sharedBridge).balance; + + if (beforeChainBalance < amountToWithdraw) { + vm.expectRevert("L1AR: not enough funds 2"); + } else { + tokenSumWithdrawal[currentTokenAddress] += amountToWithdraw; + } + + bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, currentUser, amountToWithdraw); + L2Message memory l2ToL1Message = L2Message({ + txNumberInBatch: l2TxNumberInBatch, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + data: message + }); + 
+ vm.mockCall( + bridgehubProxyAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL2MessageInclusion.selector, + currentChainId, + l2BatchNumber, + l2MessageIndex, + l2ToL1Message, + merkleProof + ), + abi.encode(true) + ); + + sharedBridge.finalizeWithdrawal({ + _chainId: currentChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + + // check if the balance was updated correctly + if (beforeChainBalance > amountToWithdraw) { + assertEq( + beforeChainBalance - l1Nullifier.chainBalance(currentChainId, currentTokenAddress), + amountToWithdraw + ); + assertEq(beforeBalance - address(sharedBridge).balance, amountToWithdraw); + } + } + + function depositEthToBridgeSuccess( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 l2Value + ) public virtual useUser(userIndexSeed) useZKChain(chainIndexSeed) useBaseToken { + if (currentTokenAddress == ETH_TOKEN_ADDRESS) { + depositEthBase(l2Value); + } else { + depositEthToERC20Chain(l2Value); + } + } + + function depositERC20ToBridgeSuccess( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 tokenIndexSeed, + uint256 l2Value + ) public virtual useUser(userIndexSeed) useZKChain(chainIndexSeed) useERC20Token(tokenIndexSeed) { + address chainBaseToken = getZKChainBaseToken(currentChainId); + + if (chainBaseToken == ETH_TOKEN_ADDRESS) { + depositERC20ToEthChain(l2Value, currentTokenAddress); + } else { + if (currentTokenAddress == chainBaseToken) { + depositERC20Base(l2Value); + } else { + depositERC20ToERC20Chain(l2Value, chainBaseToken); + } + } + } + + function withdrawSuccess( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 amountToWithdraw + ) public virtual useUser(userIndexSeed) useZKChain(chainIndexSeed) { + address token = getZKChainBaseToken(currentChainId); + + if (token != ETH_TOKEN_ADDRESS) { + 
withdrawERC20Token(amountToWithdraw, token); + } else if (token == ETH_TOKEN_ADDRESS) { + withdrawETHToken(amountToWithdraw, token); + } + } + + function getAddressesToExclude() public returns (address[] memory) { + addressesToExclude.push(bridgehubProxyAddress); + addressesToExclude.push(address(sharedBridge)); + + for (uint256 i = 0; i < users.length; i++) { + addressesToExclude.push(users[i]); + } + + for (uint256 i = 0; i < l2ContractAddresses.length; i++) { + addressesToExclude.push(l2ContractAddresses[i]); + } + + for (uint256 i = 0; i < zkChainIds.length; i++) { + addressesToExclude.push(getZKChainAddress(zkChainIds[i])); + } + + return addressesToExclude; + } + + function prepare() public { + _generateUserAddresses(); + + _deployL1Contracts(); + _deployTokens(); + _registerNewTokens(tokens); + + _deployEra(); + _deployZKChain(ETH_TOKEN_ADDRESS); + _deployZKChain(ETH_TOKEN_ADDRESS); + _deployZKChain(tokens[0]); + _deployZKChain(tokens[0]); + _deployZKChain(tokens[1]); + _deployZKChain(tokens[1]); + + for (uint256 i = 0; i < zkChainIds.length; i++) { + address contractAddress = makeAddr(string(abi.encode("contract", i))); + l2ContractAddresses.push(contractAddress); + + _addL2ChainContract(zkChainIds[i], contractAddress); + } + } + + // add this to be excluded from coverage report + function test() internal override {} +} + +contract BoundedBridgeHubInvariantTests is BridgeHubInvariantTests { + function depositEthSuccess(uint256 userIndexSeed, uint256 chainIndexSeed, uint256 l2Value) public { + uint64 MAX = 2 ** 64 - 1; + uint256 l2Value = bound(l2Value, 0.1 ether, MAX); + + emit log_string("DEPOSIT ETH"); + super.depositEthToBridgeSuccess(userIndexSeed, chainIndexSeed, l2Value); + } + + function depositERC20Success( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 tokenIndexSeed, + uint256 l2Value + ) public { + uint64 MAX = 2 ** 64 - 1; + uint256 l2Value = bound(l2Value, 0.1 ether, MAX); + + emit log_string("DEPOSIT ERC20"); + 
super.depositERC20ToBridgeSuccess(userIndexSeed, chainIndexSeed, tokenIndexSeed, l2Value); + } + + function withdrawERC20Success(uint256 userIndexSeed, uint256 chainIndexSeed, uint256 amountToWithdraw) public { + uint64 MAX = (2 ** 32 - 1) + 0.1 ether; + uint256 amountToWithdraw = bound(amountToWithdraw, 0.1 ether, MAX); + + emit log_string("WITHDRAW ERC20"); + super.withdrawSuccess(userIndexSeed, chainIndexSeed, amountToWithdraw); + } + + // add this to be excluded from coverage report + function testBoundedBridgeHubInvariant() internal {} +} + +contract InvariantTesterZKChains is Test { + BoundedBridgeHubInvariantTests tests; + + function setUp() public { + tests = new BoundedBridgeHubInvariantTests(); + // tests.prepare(); + } + + // // Check whether the sum of ETH deposits from tests, updated on each deposit and withdrawal, + // // equals the balance of L1Shared bridge. + // function invariant_ETHbalanceStaysEqual() public { + // require(1==1); + // } + + // add this to be excluded from coverage report + function test() internal {} +} diff --git a/l1-contracts/test/foundry/l1/integration/BridgehubTests.t.sol b/l1-contracts/test/foundry/l1/integration/BridgehubTests.t.sol new file mode 100644 index 000000000..a99790839 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/BridgehubTests.t.sol @@ -0,0 +1,741 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {Vm} from "forge-std/Vm.sol"; + +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; +import {IExecutor} from 
"contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {L1ContractDeployer} from "./_SharedL1ContractDeployer.t.sol"; +import {TokenDeployer} from "./_SharedTokenDeployer.t.sol"; +import {ZKChainDeployer} from "./_SharedZKChainDeployer.t.sol"; +import {L2TxMocker} from "./_SharedL2TxMocker.t.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK} from "contracts/common/Config.sol"; +import {L2CanonicalTransaction} from "contracts/common/Messaging.sol"; +import {L2Message} from "contracts/common/Messaging.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; + +contract BridgeHubInvariantTests is L1ContractDeployer, ZKChainDeployer, TokenDeployer, L2TxMocker { + uint256 constant TEST_USERS_COUNT = 10; + + bytes32 constant NEW_PRIORITY_REQUEST_HASH = + keccak256( + "NewPriorityRequest(uint256,bytes32,uint64,(uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256,uint256[4],bytes,bytes,uint256[],bytes,bytes),bytes[])" + ); + + enum RequestType { + DIRECT, + TWO_BRIDGES + } + + struct NewPriorityRequest { + uint256 txId; + bytes32 txHash; + uint64 expirationTimestamp; + L2CanonicalTransaction transaction; + bytes[] factoryDeps; + } + + address[] public users; + address[] public l2ContractAddresses; + address[] public addressesToExclude; + address public currentUser; + uint256 public currentChainId; + address public currentChainAddress; + address public currentTokenAddress = ETH_TOKEN_ADDRESS; + TestnetERC20Token currentToken; + + // Amounts deposited by each user, mapped by user address and token address + mapping(address user => mapping(address token => uint256 
deposited)) public depositsUsers; + // Amounts deposited into the bridge, mapped by ZK chain address and token address + mapping(address chain => mapping(address token => uint256 deposited)) public depositsBridge; + // Total sum of deposits into the bridge, mapped by token address + mapping(address token => uint256 deposited) public tokenSumDeposit; + // Total sum of withdrawn tokens, mapped by token address + mapping(address token => uint256 deposited) public tokenSumWithdrawal; + // Total sum of L2 values transferred to mock contracts, mapped by token address + mapping(address token => uint256 deposited) public l2ValuesSum; + // Deposits into the ZK chains contract, mapped by L2 contract address and token address + mapping(address l2contract => mapping(address token => uint256 balance)) public contractDeposits; + // Total sum of deposits into all L2 contracts, mapped by token address + mapping(address token => uint256 deposited) public contractDepositsSum; + + // gets random user from users array, set contract variables + modifier useUser(uint256 userIndexSeed) { + currentUser = users[bound(userIndexSeed, 0, users.length - 1)]; + vm.startPrank(currentUser); + _; + vm.stopPrank(); + } + + // gets random ZK chain from ZK chain ids, set contract variables + modifier useZKChain(uint256 chainIndexSeed) { + currentChainId = zkChainIds[bound(chainIndexSeed, 0, zkChainIds.length - 1)]; + currentChainAddress = getZKChainAddress(currentChainId); + _; + } + + // use token specified by address, set contract variables + modifier useGivenToken(address tokenAddress) { + currentToken = TestnetERC20Token(tokenAddress); + currentTokenAddress = tokenAddress; + _; + } + + // use random token from tokens array, set contract variables + modifier useRandomToken(uint256 tokenIndexSeed) { + currentTokenAddress = tokens[bound(tokenIndexSeed, 0, tokens.length - 1)]; + currentToken = TestnetERC20Token(currentTokenAddress); + _; + } + + // use base token as main token + // watch out, do not 
use with ETH + modifier useBaseToken() { + currentToken = TestnetERC20Token(getZKChainBaseToken(currentChainId)); + currentTokenAddress = address(currentToken); + _; + } + + // use ERC token by getting randomly token + // it keeps iterating while the token is ETH + modifier useERC20Token(uint256 tokenIndexSeed) { + currentTokenAddress = tokens[bound(tokenIndexSeed, 0, tokens.length - 1)]; + + while (currentTokenAddress == ETH_TOKEN_ADDRESS) { + tokenIndexSeed += 1; + currentTokenAddress = tokens[bound(tokenIndexSeed, 0, tokens.length - 1)]; + } + + currentToken = TestnetERC20Token(currentTokenAddress); + + _; + } + + // generate MAX_USERS addresses and append it to users array + function _generateUserAddresses() internal { + require(users.length == 0, "Addresses already generated"); + + for (uint256 i = 0; i < TEST_USERS_COUNT; i++) { + address newAddress = makeAddr(string(abi.encode("account", i))); + users.push(newAddress); + } + } + + // TODO: consider what should be actually committed, do we need to simulate operator: + // blocks -> batches -> commits or just mock it. 
+ function _commitBatchInfo(uint256 _chainId) internal { + //vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1 + 1); + + GettersFacet zkChainGetters = GettersFacet(getZKChainAddress(_chainId)); + + IExecutor.StoredBatchInfo memory batchZero; + + batchZero.batchNumber = 0; + batchZero.timestamp = 0; + batchZero.numberOfLayer1Txs = 0; + batchZero.priorityOperationsHash = EMPTY_STRING_KECCAK; + batchZero.l2LogsTreeRoot = DEFAULT_L2_LOGS_TREE_ROOT_HASH; + batchZero.batchHash = vm.parseBytes32("0x0000000000000000000000000000000000000000000000000000000000000000"); //genesis root hash + batchZero.indexRepeatedStorageChanges = uint64(0); + batchZero.commitment = vm.parseBytes32("0x0000000000000000000000000000000000000000000000000000000000000000"); + + bytes32 hashedZeroBatch = keccak256(abi.encode(batchZero)); + assertEq(zkChainGetters.storedBatchHash(0), hashedZeroBatch); + } + + // use mailbox interface to return exact amount to use as a gas on l2 side, + // prevents from failing if mintValue < l2Value + required gas + function _getMinRequiredGasPriceForChain( + uint256 _chainId, + uint256 _gasPrice, + uint256 _l2GasLimit, + uint256 _l2GasPerPubdataByteLimit + ) public view returns (uint256) { + MailboxFacet chainMailBox = MailboxFacet(getZKChainAddress(_chainId)); + + return chainMailBox.l2TransactionBaseCost(_gasPrice, _l2GasLimit, _l2GasPerPubdataByteLimit); + } + + // decodes data encoded with encodeCall, this is just to decode information received from logs + // to deposit into mock l2 contract + function _getDecodedDepositL2Calldata( + bytes memory callData + ) internal view returns (address l1Sender, address l2Receiver, address l1Token, uint256 amount, bytes memory b) { + // UnsafeBytes approach doesn't work, because abi is not deterministic + bytes memory slicedData = new bytes(callData.length - 4); + + for (uint256 i = 4; i < callData.length; i++) { + slicedData[i - 4] = callData[i]; + } + + (l1Sender, l2Receiver, l1Token, amount, b) = abi.decode( + slicedData, + (address, 
address, address, uint256, bytes) + ); + } + + // handle event emitted from logs, just to ensure proper decoding to set mock contract balance + function _handleRequestByMockL2Contract(NewPriorityRequest memory request, RequestType requestType) internal { + address contractAddress = address(uint160(uint256(request.transaction.to))); + + address tokenAddress; + address receiver; + uint256 toSend; + address l1Sender; + uint256 balanceAfter; + bytes memory temp; + + if (requestType == RequestType.TWO_BRIDGES) { + (l1Sender, receiver, tokenAddress, toSend, temp) = _getDecodedDepositL2Calldata(request.transaction.data); + } else { + (tokenAddress, toSend, receiver) = abi.decode(request.transaction.data, (address, uint256, address)); + } + + assertEq(contractAddress, receiver); + + if (tokenAddress == ETH_TOKEN_ADDRESS) { + uint256 balanceBefore = contractAddress.balance; + vm.deal(contractAddress, toSend + balanceBefore); + + balanceAfter = contractAddress.balance; + } else { + TestnetERC20Token token = TestnetERC20Token(tokenAddress); + token.mint(contractAddress, toSend); + + balanceAfter = token.balanceOf(contractAddress); + } + + contractDeposits[contractAddress][tokenAddress] += toSend; + contractDepositsSum[tokenAddress] += toSend; + assertEq(balanceAfter, contractDeposits[contractAddress][tokenAddress]); + } + + // gets event from logs + function _getNewPriorityQueueFromLogs(Vm.Log[] memory logs) internal returns (NewPriorityRequest memory request) { + for (uint256 i = 0; i < logs.length; i++) { + Vm.Log memory log = logs[i]; + + if (log.topics[0] == NEW_PRIORITY_REQUEST_HASH) { + ( + request.txId, + request.txHash, + request.expirationTimestamp, + request.transaction, + request.factoryDeps + ) = abi.decode(log.data, (uint256, bytes32, uint64, L2CanonicalTransaction, bytes[])); + } + } + } + + // deposits ERC20 token to the ZK chain where base token is ETH + // this function use requestL2TransactionTwoBridges function from shared bridge. 
+ // tokenAddress should be any ERC20 token, excluding ETH + function depositERC20ToEthChain(uint256 l2Value, address tokenAddress) private useGivenToken(tokenAddress) { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = minRequiredGas; + vm.deal(currentUser, mintValue); + + currentToken.mint(currentUser, l2Value); + currentToken.approve(address(sharedBridge), l2Value); + + bytes memory secondBridgeCallData = abi.encode(currentTokenAddress, l2Value, chainContracts[currentChainId]); + L2TransactionRequestTwoBridgesOuter memory requestTx = _createL2TransactionRequestTwoBridges({ + _chainId: currentChainId, + _mintValue: mintValue, + _secondBridgeValue: 0, + _secondBridgeAddress: address(sharedBridge), + _l2Value: 0, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _secondBridgeCalldata: secondBridgeCallData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionTwoBridges{value: mintValue}(requestTx); + Vm.Log[] memory logs = vm.getRecordedLogs(); + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.TWO_BRIDGES); + + depositsUsers[currentUser][ETH_TOKEN_ADDRESS] += mintValue; + depositsBridge[currentChainAddress][ETH_TOKEN_ADDRESS] += mintValue; + tokenSumDeposit[ETH_TOKEN_ADDRESS] += mintValue; + + depositsUsers[currentUser][currentTokenAddress] += l2Value; + depositsBridge[currentChainAddress][currentTokenAddress] += l2Value; + tokenSumDeposit[currentTokenAddress] += l2Value; + l2ValuesSum[currentTokenAddress] += l2Value; + } + + // deposits ETH token to chain where base token is some ERC20 + // modifier prevents you from using 
some other token as base + function depositEthToERC20Chain(uint256 l2Value) private useBaseToken { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + vm.deal(currentUser, l2Value); + uint256 mintValue = minRequiredGas; + currentToken.mint(currentUser, mintValue); + currentToken.approve(address(sharedBridge), mintValue); + + bytes memory secondBridgeCallData = abi.encode(ETH_TOKEN_ADDRESS, uint256(0), chainContracts[currentChainId]); + L2TransactionRequestTwoBridgesOuter memory requestTx = _createL2TransactionRequestTwoBridges({ + _chainId: currentChainId, + _mintValue: mintValue, + _secondBridgeValue: l2Value, + _secondBridgeAddress: address(sharedBridge), + _l2Value: 0, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _secondBridgeCalldata: secondBridgeCallData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionTwoBridges{value: l2Value}(requestTx); + Vm.Log[] memory logs = vm.getRecordedLogs(); + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.TWO_BRIDGES); + + depositsUsers[currentUser][ETH_TOKEN_ADDRESS] += l2Value; + depositsBridge[currentChainAddress][ETH_TOKEN_ADDRESS] += l2Value; + tokenSumDeposit[ETH_TOKEN_ADDRESS] += l2Value; + l2ValuesSum[ETH_TOKEN_ADDRESS] += l2Value; + + depositsUsers[currentUser][currentTokenAddress] += mintValue; + depositsBridge[currentChainAddress][currentTokenAddress] += mintValue; + tokenSumDeposit[currentTokenAddress] += mintValue; + } + + // deposits ERC20 to token with base being also ERC20 + // there are no modifiers so watch out, baseTokenAddress should be base of ZK chain + // currentToken should be 
different from base + function depositERC20ToERC20Chain(uint256 l2Value, address baseTokenAddress) private { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = minRequiredGas; + + TestnetERC20Token baseToken = TestnetERC20Token(baseTokenAddress); + baseToken.mint(currentUser, mintValue); + baseToken.approve(address(sharedBridge), mintValue); + + currentToken.mint(currentUser, l2Value); + currentToken.approve(address(sharedBridge), l2Value); + + bytes memory secondBridgeCallData = abi.encode(currentTokenAddress, l2Value, chainContracts[currentChainId]); + L2TransactionRequestTwoBridgesOuter memory requestTx = _createL2TransactionRequestTwoBridges({ + _chainId: currentChainId, + _mintValue: mintValue, + _secondBridgeValue: 0, + _secondBridgeAddress: address(sharedBridge), + _l2Value: 0, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _secondBridgeCalldata: secondBridgeCallData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionTwoBridges(requestTx); + Vm.Log[] memory logs = vm.getRecordedLogs(); + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.TWO_BRIDGES); + + depositsUsers[currentUser][baseTokenAddress] += mintValue; + depositsBridge[currentChainAddress][baseTokenAddress] += mintValue; + tokenSumDeposit[baseTokenAddress] += mintValue; + + depositsUsers[currentUser][currentTokenAddress] += l2Value; + depositsBridge[currentChainAddress][currentTokenAddress] += l2Value; + tokenSumDeposit[currentTokenAddress] += l2Value; + l2ValuesSum[currentTokenAddress] += l2Value; + } + + // deposits ETH to ZK chain where base is ETH + 
function depositEthBase(uint256 l2Value) private { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + + uint256 l2GasLimit = 1000000; // reverts with 8 + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = l2Value + minRequiredGas; + vm.deal(currentUser, mintValue); + + bytes memory callData = abi.encode(currentTokenAddress, l2Value, chainContracts[currentChainId]); + L2TransactionRequestDirect memory txRequest = _createL2TransactionRequestDirect({ + _chainId: currentChainId, + _mintValue: mintValue, + _l2Value: l2Value, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _l2CallData: callData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionDirect{value: mintValue}(txRequest); + Vm.Log[] memory logs = vm.getRecordedLogs(); + + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.DIRECT); + + depositsUsers[currentUser][ETH_TOKEN_ADDRESS] += mintValue; + depositsBridge[currentChainAddress][ETH_TOKEN_ADDRESS] += mintValue; + tokenSumDeposit[ETH_TOKEN_ADDRESS] += mintValue; + l2ValuesSum[ETH_TOKEN_ADDRESS] += l2Value; + } + + // deposits base ERC20 token to the bridge + function depositERC20Base(uint256 l2Value) private useBaseToken { + uint256 gasPrice = 10000000; + vm.txGasPrice(gasPrice); + vm.deal(currentUser, gasPrice); + + uint256 l2GasLimit = 1000000; + uint256 minRequiredGas = _getMinRequiredGasPriceForChain( + currentChainId, + gasPrice, + l2GasLimit, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA + ); + + uint256 mintValue = l2Value + minRequiredGas; + currentToken.mint(currentUser, mintValue); + currentToken.approve(address(sharedBridge), mintValue); + + bytes memory callData = abi.encode(currentTokenAddress, 
l2Value, chainContracts[currentChainId]); + L2TransactionRequestDirect memory txRequest = _createL2TransactionRequestDirect({ + _chainId: currentChainId, + _mintValue: mintValue, + _l2Value: l2Value, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _l2CallData: callData + }); + + vm.recordLogs(); + bytes32 resultantHash = bridgehub.requestL2TransactionDirect(txRequest); + Vm.Log[] memory logs = vm.getRecordedLogs(); + + NewPriorityRequest memory request = _getNewPriorityQueueFromLogs(logs); + + assertNotEq(resultantHash, bytes32(0)); + assertNotEq(request.txHash, bytes32(0)); + _handleRequestByMockL2Contract(request, RequestType.DIRECT); + + depositsUsers[currentUser][currentTokenAddress] += mintValue; + depositsBridge[currentChainAddress][currentTokenAddress] += mintValue; + tokenSumDeposit[currentTokenAddress] += mintValue; + l2ValuesSum[currentTokenAddress] += l2Value; + } + + function withdrawERC20Token(uint256 amountToWithdraw, address tokenAddress) private useGivenToken(tokenAddress) { + uint256 l2BatchNumber = uint256(uint160(makeAddr("l2BatchNumber"))); + uint256 l2MessageIndex = uint256(uint160(makeAddr("l2MessageIndex"))); + uint16 l2TxNumberInBatch = uint16(uint160(makeAddr("l2TxNumberInBatch"))); + bytes32[] memory merkleProof = new bytes32[](1); + + _setSharedBridgeIsWithdrawalFinalized(currentChainId, l2BatchNumber, l2MessageIndex, false); + uint256 beforeChainBalance = l1Nullifier.chainBalance(currentChainId, currentTokenAddress); + uint256 beforeBalance = currentToken.balanceOf(address(sharedBridge)); + + if (beforeChainBalance < amountToWithdraw) { + vm.expectRevert("L1AR: not enough funds 2"); + } else { + tokenSumWithdrawal[currentTokenAddress] += amountToWithdraw; + } + + bytes memory message = abi.encodePacked( + IL1ERC20Bridge.finalizeWithdrawal.selector, + currentUser, + currentTokenAddress, + amountToWithdraw + ); + + L2Message memory l2ToL1Message = L2Message({ + txNumberInBatch: 
l2TxNumberInBatch, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + data: message + }); + + vm.mockCall( + bridgehubProxyAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL2MessageInclusion.selector, + currentChainId, + l2BatchNumber, + l2MessageIndex, + l2ToL1Message, + merkleProof + ), + abi.encode(true) + ); + + sharedBridge.finalizeWithdrawal({ + _chainId: currentChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + + // check if the balance was updated correctly + if (beforeChainBalance > amountToWithdraw) { + assertEq( + beforeChainBalance - l1Nullifier.chainBalance(currentChainId, currentTokenAddress), + amountToWithdraw + ); + assertEq(beforeBalance - currentToken.balanceOf(address(sharedBridge)), amountToWithdraw); + } + } + + function withdrawETHToken(uint256 amountToWithdraw, address tokenAddress) private useGivenToken(tokenAddress) { + uint256 l2BatchNumber = uint256(uint160(makeAddr("l2BatchNumber"))); + uint256 l2MessageIndex = uint256(uint160(makeAddr("l2MessageIndex"))); + uint16 l2TxNumberInBatch = uint16(uint160(makeAddr("l2TxNumberInBatch"))); + bytes32[] memory merkleProof = new bytes32[](1); + + _setSharedBridgeIsWithdrawalFinalized(currentChainId, l2BatchNumber, l2MessageIndex, false); + uint256 beforeChainBalance = l1Nullifier.chainBalance(currentChainId, currentTokenAddress); + uint256 beforeBalance = address(sharedBridge).balance; + + if (beforeChainBalance < amountToWithdraw) { + vm.expectRevert("L1AR: not enough funds 2"); + } else { + tokenSumWithdrawal[currentTokenAddress] += amountToWithdraw; + } + + bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, currentUser, amountToWithdraw); + L2Message memory l2ToL1Message = L2Message({ + txNumberInBatch: l2TxNumberInBatch, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + data: message + }); + 
+ vm.mockCall( + bridgehubProxyAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL2MessageInclusion.selector, + currentChainId, + l2BatchNumber, + l2MessageIndex, + l2ToL1Message, + merkleProof + ), + abi.encode(true) + ); + + sharedBridge.finalizeWithdrawal({ + _chainId: currentChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + + // check if the balance was updated correctly + if (beforeChainBalance > amountToWithdraw) { + assertEq( + beforeChainBalance - l1Nullifier.chainBalance(currentChainId, currentTokenAddress), + amountToWithdraw + ); + assertEq(beforeBalance - address(sharedBridge).balance, amountToWithdraw); + } + } + + function depositEthToBridgeSuccess( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 l2Value + ) public virtual useUser(userIndexSeed) useZKChain(chainIndexSeed) useBaseToken { + if (currentTokenAddress == ETH_TOKEN_ADDRESS) { + depositEthBase(l2Value); + } else { + depositEthToERC20Chain(l2Value); + } + } + + function depositERC20ToBridgeSuccess( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 tokenIndexSeed, + uint256 l2Value + ) public virtual useUser(userIndexSeed) useZKChain(chainIndexSeed) useERC20Token(tokenIndexSeed) { + address chainBaseToken = getZKChainBaseToken(currentChainId); + + if (chainBaseToken == ETH_TOKEN_ADDRESS) { + depositERC20ToEthChain(l2Value, currentTokenAddress); + } else { + if (currentTokenAddress == chainBaseToken) { + depositERC20Base(l2Value); + } else { + depositERC20ToERC20Chain(l2Value, chainBaseToken); + } + } + } + + function withdrawSuccess( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 amountToWithdraw + ) public virtual useUser(userIndexSeed) useZKChain(chainIndexSeed) { + address token = getZKChainBaseToken(currentChainId); + + if (token != ETH_TOKEN_ADDRESS) { + 
withdrawERC20Token(amountToWithdraw, token); + } else if (token == ETH_TOKEN_ADDRESS) { + withdrawETHToken(amountToWithdraw, token); + } + } + + function getAddressesToExclude() public returns (address[] memory) { + addressesToExclude.push(bridgehubProxyAddress); + addressesToExclude.push(address(sharedBridge)); + + for (uint256 i = 0; i < users.length; i++) { + addressesToExclude.push(users[i]); + } + + for (uint256 i = 0; i < l2ContractAddresses.length; i++) { + addressesToExclude.push(l2ContractAddresses[i]); + } + + for (uint256 i = 0; i < zkChainIds.length; i++) { + addressesToExclude.push(getZKChainAddress(zkChainIds[i])); + } + + return addressesToExclude; + } + + function prepare() public { + _generateUserAddresses(); + + _deployL1Contracts(); + _deployTokens(); + _registerNewTokens(tokens); + + _deployEra(); + _deployZKChain(ETH_TOKEN_ADDRESS); + _deployZKChain(ETH_TOKEN_ADDRESS); + _deployZKChain(tokens[0]); + _deployZKChain(tokens[0]); + _deployZKChain(tokens[1]); + _deployZKChain(tokens[1]); + + for (uint256 i = 0; i < zkChainIds.length; i++) { + address contractAddress = makeAddr(string(abi.encode("contract", i))); + l2ContractAddresses.push(contractAddress); + + _addL2ChainContract(zkChainIds[i], contractAddress); + } + } + + // add this to be excluded from coverage report + function test() internal override {} +} + +contract BoundedBridgeHubInvariantTests is BridgeHubInvariantTests { + function depositEthSuccess(uint256 userIndexSeed, uint256 chainIndexSeed, uint256 l2Value) public { + uint64 MAX = 2 ** 64 - 1; + uint256 l2Value = bound(l2Value, 0.1 ether, MAX); + + emit log_string("DEPOSIT ETH"); + super.depositEthToBridgeSuccess(userIndexSeed, chainIndexSeed, l2Value); + } + + function depositERC20Success( + uint256 userIndexSeed, + uint256 chainIndexSeed, + uint256 tokenIndexSeed, + uint256 l2Value + ) public { + uint64 MAX = 2 ** 64 - 1; + uint256 l2Value = bound(l2Value, 0.1 ether, MAX); + + emit log_string("DEPOSIT ERC20"); + 
super.depositERC20ToBridgeSuccess(userIndexSeed, chainIndexSeed, tokenIndexSeed, l2Value); + } + + function withdrawERC20Success(uint256 userIndexSeed, uint256 chainIndexSeed, uint256 amountToWithdraw) public { + uint64 MAX = (2 ** 32 - 1) + 0.1 ether; + uint256 amountToWithdraw = bound(amountToWithdraw, 0.1 ether, MAX); + + emit log_string("WITHDRAW ERC20"); + super.withdrawSuccess(userIndexSeed, chainIndexSeed, amountToWithdraw); + } + + // add this to be excluded from coverage report + function testBoundedBridgeHubInvariant() internal {} +} + +// contract InvariantTesterZKChains is Test { +// BoundedBridgeHubInvariantTests tests; + +// function setUp() public { +// tests = new BoundedBridgeHubInvariantTests(); +// tests.prepare(); +// } + +// // Check whether the sum of ETH deposits from tests, updated on each deposit and withdrawal, +// // equals the balance of L1Shared bridge. +// function test_ETHbalanceStaysEqual() public { +// require(1 == 1); +// } + +// // add this to be excluded from coverage report +// function test() internal {} +// } diff --git a/l1-contracts/test/foundry/l1/integration/DeploymentTest.t.sol b/l1-contracts/test/foundry/l1/integration/DeploymentTest.t.sol new file mode 100644 index 000000000..b4cca3bb1 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/DeploymentTest.t.sol @@ -0,0 +1,174 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {Vm} from "forge-std/Vm.sol"; + +import {Ownable} from "@openzeppelin/contracts-v4/access/Ownable.sol"; + +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {IMailbox} from 
"contracts/state-transition/chain-interfaces/IMailbox.sol"; +import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {L1ContractDeployer} from "./_SharedL1ContractDeployer.t.sol"; +import {TokenDeployer} from "./_SharedTokenDeployer.t.sol"; +import {ZKChainDeployer} from "./_SharedZKChainDeployer.t.sol"; +import {L2TxMocker} from "./_SharedL2TxMocker.t.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK} from "contracts/common/Config.sol"; +import {L2CanonicalTransaction, L2Message} from "contracts/common/Messaging.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {IncorrectBridgeHubAddress} from "contracts/common/L1ContractErrors.sol"; + +contract DeploymentTests is L1ContractDeployer, ZKChainDeployer, TokenDeployer, L2TxMocker { + uint256 constant TEST_USERS_COUNT = 10; + address[] public users; + address[] public l2ContractAddresses; + + // generate MAX_USERS addresses and append it to users array + function _generateUserAddresses() internal { + require(users.length == 0, "Addresses already generated"); + + for (uint256 i = 0; i < TEST_USERS_COUNT; i++) { + address newAddress = makeAddr(string(abi.encode("account", i))); + users.push(newAddress); + } + } + + function prepare() public { + _generateUserAddresses(); + + _deployL1Contracts(); + _deployTokens(); + _registerNewTokens(tokens); + + _deployEra(); + // _deployZKChain(ETH_TOKEN_ADDRESS); + // 
_deployZKChain(ETH_TOKEN_ADDRESS); + // _deployZKChain(tokens[0]); + // _deployZKChain(tokens[0]); + // _deployZKChain(tokens[1]); + // _deployZKChain(tokens[1]); + + for (uint256 i = 0; i < zkChainIds.length; i++) { + address contractAddress = makeAddr(string(abi.encode("contract", i))); + l2ContractAddresses.push(contractAddress); + + _addL2ChainContract(zkChainIds[i], contractAddress); + } + } + + function setUp() public { + prepare(); + } + + // Check whether the sum of ETH deposits from tests, updated on each deposit and withdrawal, + // equals the balance of L1Shared bridge. + function test_initialDeployment() public { + uint256 chainId = zkChainIds[0]; + address newChainAddress = bridgehub.getZKChain(chainId); + address admin = IZKChain(bridgehub.getZKChain(chainId)).getAdmin(); + + assertNotEq(admin, address(0)); + assertNotEq(newChainAddress, address(0)); + + address[] memory chainAddresses = bridgehub.getAllZKChains(); + assertEq(chainAddresses.length, 1); + assertEq(chainAddresses[0], newChainAddress); + + uint256[] memory chainIds = bridgehub.getAllZKChainChainIDs(); + assertEq(chainIds.length, 1); + assertEq(chainIds[0], chainId); + + uint256 protocolVersion = chainTypeManager.getProtocolVersion(chainId); + assertEq(protocolVersion, 25); + } + + function test_bridgehubSetter() public { + uint256 chainId = zkChainIds[0]; + uint256 randomChainId = 123456; + + vm.mockCall( + address(chainTypeManager), + abi.encodeWithSelector(IChainTypeManager.getZKChainLegacy.selector, randomChainId), + abi.encode(address(0x01)) + ); + vm.store(address(bridgehub), keccak256(abi.encode(randomChainId, 205)), bytes32(uint256(uint160(1)))); + vm.store( + address(bridgehub), + keccak256(abi.encode(randomChainId, 204)), + bytes32(uint256(uint160(address(chainTypeManager)))) + ); + bridgehub.setLegacyBaseTokenAssetId(randomChainId); + bridgehub.setLegacyChainAddress(randomChainId); + } + + function test_registerAlreadyDeployedZKChain() public { + address owner = 
Ownable(address(bridgehub)).owner(); + + { + uint256 chainId = currentZKChainId++; + bytes32 baseTokenAssetId = DataEncoding.encodeNTVAssetId(chainId, ETH_TOKEN_ADDRESS); + + address chain = _deployZkChain( + chainId, + baseTokenAssetId, + owner, + chainTypeManager.protocolVersion(), + chainTypeManager.storedBatchZero(), + address(bridgehub) + ); + + address stmAddr = IZKChain(chain).getChainTypeManager(); + + vm.startBroadcast(owner); + bridgehub.addChainTypeManager(stmAddr); + bridgehub.addTokenAssetId(baseTokenAssetId); + bridgehub.registerAlreadyDeployedZKChain(chainId, chain); + vm.stopBroadcast(); + + address bridgehubStmForChain = bridgehub.chainTypeManager(chainId); + bytes32 bridgehubBaseAssetIdForChain = bridgehub.baseTokenAssetId(chainId); + address bridgehubChainAddressdForChain = bridgehub.getZKChain(chainId); + address bhAddr = IZKChain(chain).getBridgehub(); + + assertEq(bridgehubStmForChain, stmAddr); + assertEq(bridgehubBaseAssetIdForChain, baseTokenAssetId); + assertEq(bridgehubChainAddressdForChain, chain); + assertEq(bhAddr, address(bridgehub)); + } + + { + uint256 chainId = currentZKChainId++; + bytes32 baseTokenAssetId = DataEncoding.encodeNTVAssetId(chainId, ETH_TOKEN_ADDRESS); + address chain = _deployZkChain( + chainId, + baseTokenAssetId, + owner, + chainTypeManager.protocolVersion(), + chainTypeManager.storedBatchZero(), + address(bridgehub.sharedBridge()) + ); + + address stmAddr = IZKChain(chain).getChainTypeManager(); + + vm.startBroadcast(owner); + bridgehub.addTokenAssetId(baseTokenAssetId); + vm.expectRevert( + abi.encodeWithSelector(IncorrectBridgeHubAddress.selector, address(bridgehub.sharedBridge())) + ); + bridgehub.registerAlreadyDeployedZKChain(chainId, chain); + vm.stopBroadcast(); + } + } + + // add this to be excluded from coverage report + function test() internal override {} +} diff --git a/l1-contracts/test/foundry/l1/integration/L1GatewayTests.t.sol b/l1-contracts/test/foundry/l1/integration/L1GatewayTests.t.sol new 
file mode 100644 index 000000000..9f67d7973 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/L1GatewayTests.t.sol @@ -0,0 +1,335 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {Vm} from "forge-std/Vm.sol"; +import "forge-std/console.sol"; + +import {Ownable} from "@openzeppelin/contracts-v4/access/Ownable.sol"; + +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter, BridgehubMintCTMAssetData, BridgehubBurnCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; +import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; +import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {L1ContractDeployer} from "./_SharedL1ContractDeployer.t.sol"; +import {TokenDeployer} from "./_SharedTokenDeployer.t.sol"; +import {ZKChainDeployer} from "./_SharedZKChainDeployer.t.sol"; +import {GatewayDeployer} from "./_SharedGatewayDeployer.t.sol"; +import {L2TxMocker} from "./_SharedL2TxMocker.t.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK} from "contracts/common/Config.sol"; +import {L2CanonicalTransaction} from "contracts/common/Messaging.sol"; +import {L2Message} from "contracts/common/Messaging.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, L2_ASSET_ROUTER_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; +import {IL1AssetRouter} from 
"contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {FinalizeL1DepositParams} from "contracts/bridge/L1Nullifier.sol"; + +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {TxStatus} from "contracts/common/Messaging.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {IncorrectBridgeHubAddress} from "contracts/common/L1ContractErrors.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; + +contract L1GatewayTests is L1ContractDeployer, ZKChainDeployer, TokenDeployer, L2TxMocker, GatewayDeployer { + uint256 constant TEST_USERS_COUNT = 10; + address[] public users; + address[] public l2ContractAddresses; + + uint256 migratingChainId = 10; + IZKChain migratingChain; + + uint256 gatewayChainId = 11; + IZKChain gatewayChain; + + uint256 mintChainId = 12; + + // generate MAX_USERS addresses and append it to users array + function _generateUserAddresses() internal { + require(users.length == 0, "Addresses already generated"); + + for (uint256 i = 0; i < TEST_USERS_COUNT; i++) { + address newAddress = makeAddr(string(abi.encode("account", i))); + users.push(newAddress); + } + } + + function prepare() public { + _generateUserAddresses(); + + _deployL1Contracts(); + _deployTokens(); + _registerNewTokens(tokens); + + _deployEra(); + _deployZKChain(ETH_TOKEN_ADDRESS); + acceptPendingAdmin(); + _deployZKChain(ETH_TOKEN_ADDRESS); + acceptPendingAdmin(); + // _deployZKChain(tokens[0]); + // 
_deployZKChain(tokens[0]); + // _deployZKChain(tokens[1]); + // _deployZKChain(tokens[1]); + + for (uint256 i = 0; i < zkChainIds.length; i++) { + address contractAddress = makeAddr(string(abi.encode("contract", i))); + l2ContractAddresses.push(contractAddress); + + _addL2ChainContract(zkChainIds[i], contractAddress); + // _registerL2SharedBridge(zkChainIds[i], contractAddress); + } + + _initializeGatewayScript(); + + vm.deal(ecosystemConfig.ownerAddress, 100000000000000000000000000000000000); + migratingChain = IZKChain(IBridgehub(bridgehub).getZKChain(migratingChainId)); + gatewayChain = IZKChain(IBridgehub(bridgehub).getZKChain(gatewayChainId)); + vm.deal(migratingChain.getAdmin(), 100000000000000000000000000000000000); + vm.deal(gatewayChain.getAdmin(), 100000000000000000000000000000000000); + + // vm.deal(msg.sender, 100000000000000000000000000000000000); + // vm.deal(bridgehub, 100000000000000000000000000000000000); + } + + // This is a method to simplify porting the tests for now. 
+ // Here we rely that the first restriction is the AccessControlRestriction + function _extractAccessControlRestriction(address admin) internal returns (address) { + return ChainAdmin(payable(admin)).getRestrictions()[0]; + } + + function setUp() public { + prepare(); + } + + function _setUpGatewayWithFilterer() internal { + gatewayScript.governanceRegisterGateway(); + gatewayScript.deployAndSetGatewayTransactionFilterer(); + } + + // + function test_registerGateway() public { + _setUpGatewayWithFilterer(); + } + + // + function test_moveChainToGateway() public { + _setUpGatewayWithFilterer(); + gatewayScript.migrateChainToGateway( + migratingChain.getAdmin(), + _extractAccessControlRestriction(migratingChain.getAdmin()), + migratingChainId + ); + require(bridgehub.settlementLayer(migratingChainId) == gatewayChainId, "Migration failed"); + } + + function test_l2Registration() public { + _setUpGatewayWithFilterer(); + gatewayScript.migrateChainToGateway( + migratingChain.getAdmin(), + _extractAccessControlRestriction(migratingChain.getAdmin()), + migratingChainId + ); + gatewayScript.governanceSetCTMAssetHandler(bytes32(0)); + gatewayScript.registerAssetIdInBridgehub(address(0x01), bytes32(0)); + } + + function test_startMessageToL3() public { + _setUpGatewayWithFilterer(); + gatewayScript.migrateChainToGateway( + migratingChain.getAdmin(), + _extractAccessControlRestriction(migratingChain.getAdmin()), + migratingChainId + ); + IBridgehub bridgehub = IBridgehub(bridgehub); + uint256 expectedValue = 1000000000000000000000; + + L2TransactionRequestDirect memory request = _createL2TransactionRequestDirect( + migratingChainId, + expectedValue, + 0, + 72000000, + 800, + "0x" + ); + bridgehub.requestL2TransactionDirect{value: expectedValue}(request); + } + + function test_recoverFromFailedChainMigration() public { + _setUpGatewayWithFilterer(); + gatewayScript.migrateChainToGateway( + migratingChain.getAdmin(), + 
_extractAccessControlRestriction(migratingChain.getAdmin()), + migratingChainId + ); + + // Setup + IBridgehub bridgehub = IBridgehub(bridgehub); + bytes32 assetId = bridgehub.ctmAssetIdFromChainId(migratingChainId); + bytes memory transferData; + + { + IZKChain chain = IZKChain(bridgehub.getZKChain(migratingChainId)); + bytes memory chainData = abi.encode(chain.getProtocolVersion()); + bytes memory ctmData = abi.encode( + address(1), + msg.sender, + chainTypeManager.protocolVersion(), + ecosystemConfig.contracts.diamondCutData + ); + BridgehubBurnCTMAssetData memory data = BridgehubBurnCTMAssetData({ + chainId: migratingChainId, + ctmData: ctmData, + chainData: chainData + }); + transferData = abi.encode(data); + } + + address chainAdmin = IZKChain(bridgehub.getZKChain(migratingChainId)).getAdmin(); + IL1AssetRouter assetRouter = IL1AssetRouter(address(bridgehub.sharedBridge())); + bytes32 l2TxHash = keccak256("l2TxHash"); + uint256 l2BatchNumber = 5; + uint256 l2MessageIndex = 0; + uint16 l2TxNumberInBatch = 0; + bytes32[] memory merkleProof = new bytes32[](1); + bytes32 txDataHash = keccak256(bytes.concat(bytes1(0x01), abi.encode(chainAdmin, assetId, transferData))); + + // Mock Call for Msg Inclusion + vm.mockCall( + address(bridgehub), + abi.encodeWithSelector( + IBridgehub.proveL1ToL2TransactionStatus.selector, + migratingChainId, + l2TxHash, + l2BatchNumber, + l2MessageIndex, + l2TxNumberInBatch, + merkleProof, + TxStatus.Failure + ), + abi.encode(true) + ); + + // Set Deposit Happened + vm.startBroadcast(address(bridgehub)); + assetRouter.bridgehubConfirmL2Transaction({ + _chainId: migratingChainId, + _txDataHash: txDataHash, + _txHash: l2TxHash + }); + vm.stopBroadcast(); + + vm.startBroadcast(); + l1Nullifier.bridgeRecoverFailedTransfer({ + _chainId: migratingChainId, + _depositSender: chainAdmin, + _assetId: assetId, + _assetData: transferData, + _l2TxHash: l2TxHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + 
_l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + vm.stopBroadcast(); + } + + function test_finishMigrateBackChain() public { + _setUpGatewayWithFilterer(); + gatewayScript.migrateChainToGateway( + migratingChain.getAdmin(), + _extractAccessControlRestriction(migratingChain.getAdmin()), + migratingChainId + ); + migrateBackChain(); + } + + function migrateBackChain() public { + IBridgehub bridgehub = IBridgehub(bridgehub); + IZKChain migratingChain = IZKChain(bridgehub.getZKChain(migratingChainId)); + bytes32 assetId = bridgehub.ctmAssetIdFromChainId(migratingChainId); + + vm.startBroadcast(Ownable(address(bridgehub)).owner()); + bridgehub.registerSettlementLayer(gatewayChainId, true); + vm.stopBroadcast(); + + bytes32 baseTokenAssetId = eraConfig.baseTokenAssetId; + + uint256 currentChainId = block.chainid; + // we are already on L1, so we have to set another chain id, it cannot be GW or mintChainId. + vm.chainId(migratingChainId); + vm.mockCall( + address(bridgehub), + abi.encodeWithSelector(IBridgehub.proveL2MessageInclusion.selector), + abi.encode(true) + ); + vm.mockCall( + address(bridgehub), + abi.encodeWithSelector(IBridgehub.ctmAssetIdFromChainId.selector), + abi.encode(assetId) + ); + vm.mockCall( + address(chainTypeManager), + abi.encodeWithSelector(IChainTypeManager.protocolVersion.selector), + abi.encode(chainTypeManager.protocolVersion()) + ); + + uint256 protocolVersion = chainTypeManager.getProtocolVersion(migratingChainId); + + bytes memory chainData = abi.encode(IAdmin(address(migratingChain)).prepareChainCommitment()); + bytes memory ctmData = abi.encode( + baseTokenAssetId, + msg.sender, + protocolVersion, + ecosystemConfig.contracts.diamondCutData + ); + BridgehubMintCTMAssetData memory data = BridgehubMintCTMAssetData({ + chainId: migratingChainId, + baseTokenAssetId: baseTokenAssetId, + ctmData: ctmData, + chainData: chainData + }); + bytes memory bridgehubMintData = abi.encode(data); + bytes memory message = 
abi.encodePacked( + IAssetRouterBase.finalizeDeposit.selector, + gatewayChainId, + assetId, + bridgehubMintData + ); + gatewayScript.finishMigrateChainFromGateway( + migratingChainId, + gatewayChainId, + 0, + 0, + 0, + message, + new bytes32[](0) + ); + + vm.chainId(currentChainId); + + assertEq(bridgehub.baseTokenAssetId(migratingChainId), baseTokenAssetId); + IZKChain migratingChainContract = IZKChain(bridgehub.getZKChain(migratingChainId)); + assertEq(migratingChainContract.getBaseTokenAssetId(), baseTokenAssetId); + } + + /// to increase coverage, properly tested in L2GatewayTests + function test_forwardToL3OnGateway() public { + _setUpGatewayWithFilterer(); + vm.chainId(12345); + vm.startBroadcast(SETTLEMENT_LAYER_RELAY_SENDER); + bridgehub.forwardTransactionOnGateway(migratingChainId, bytes32(0), 0); + vm.stopBroadcast(); + } + + // add this to be excluded from coverage report + function test() internal override {} +} diff --git a/l1-contracts/test/foundry/l1/integration/UpgradeTest.t.sol b/l1-contracts/test/foundry/l1/integration/UpgradeTest.t.sol new file mode 100644 index 000000000..3cbcf454b --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/UpgradeTest.t.sol @@ -0,0 +1,105 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +// solhint-disable no-console, gas-custom-errors + +import {Script, console2 as console} from "forge-std/Script.sol"; +import {stdToml} from "forge-std/StdToml.sol"; + +import {EcosystemUpgrade} from "deploy-scripts/upgrade/EcosystemUpgrade.s.sol"; +import {ChainUpgrade} from "deploy-scripts/upgrade/ChainUpgrade.s.sol"; +import {Call} from "contracts/governance/Common.sol"; +import {Test} from "forge-std/Test.sol"; + +string constant ECOSYSTEM_INPUT = "/test/foundry/l1/integration/upgrade-envs/script-config/mainnet.toml"; +string constant ECOSYSTEM_OUTPUT = "/test/foundry/l1/integration/upgrade-envs/script-out/mainnet.toml"; +string constant CHAIN_INPUT = 
"/test/foundry/l1/integration/upgrade-envs/script-config/mainnet-era.toml"; +string constant CHAIN_OUTPUT = "/test/foundry/l1/integration/upgrade-envs/script-out/mainnet-era.toml"; + +contract UpgradeTest is Test { + EcosystemUpgrade generateUpgradeData; + ChainUpgrade chainUpgrade; + + function setUp() public { + generateUpgradeData = new EcosystemUpgrade(); + chainUpgrade = new ChainUpgrade(); + } + + function test_MainnetFork() public { + console.log("Preparing ecosystem contracts"); + // Firstly, we deploy all the contracts. + generateUpgradeData.prepareEcosystemContracts(ECOSYSTEM_INPUT, ECOSYSTEM_OUTPUT); + + // For chain, we have deployed the DA validator contracts + // and also updated the chain admin. + // IMPORTANT: for erc20-based chains with token multiplier setter + // this should be coordinated with the server. + console.log("Preparing chain for the upgrade"); + chainUpgrade.prepareChain(ECOSYSTEM_INPUT, ECOSYSTEM_OUTPUT, CHAIN_INPUT, CHAIN_OUTPUT); + + console.log("Starting stage1 of the upgrade!"); + // Now, some time has passed and we are ready to start the upgrade of the + // ecosystem. + // Stage 1 of the upgrade: + // - accept all the ownerships of the contracts + // - set the new upgrade data for chains + update validator timelock. + Call[] memory stage1Calls = mergeCalls( + generateUpgradeData.provideAcceptOwnershipCalls(), + generateUpgradeData.provideSetNewVersionUpgradeCall() + ); + + governanceMulticall(generateUpgradeData.getOwnerAddress(), stage1Calls); + + console.log("Stage1 is done, now all the chains have to upgrade to the new version"); + + console.log("Upgrading Era"); + + // Now, the admin of the Era needs to call the upgrade function. + // Note, that the step below also updated ValidatorTimelock so the server needs to be ready for that. + // TODO: We do not include calls that ensure that the server is ready for the sake of brevity. 
+ chainUpgrade.upgradeChain( + generateUpgradeData.getOldProtocolVersion(), + generateUpgradeData.getChainUpgradeInfo() + ); + + // TODO: here we should include tests that deposits work for upgraded chains + // including era specific deposit/withdraw functions + // We also may need to test that normal flow of block commit / verify / execute works (but it is hard) + + vm.warp(generateUpgradeData.getOldProtocolDeadline()); + + console.log("Starting stage2 of the upgrade!"); + governanceMulticall(generateUpgradeData.getOwnerAddress(), generateUpgradeData.getStage2UpgradeCalls()); + + // TODO: here we should have tests that the bridging works for the previously deployed chains + // and that it does not work for those that did not upgrade. + // TODO: test that creation of new chains works under new conditions. + // TODO: if not hard, include test for deploying a gateway and migrating Era to it. + } + + /// @dev This is a function that is used for additional visibility of transactions + /// that the decentralized governance should do. 
+ function governanceMulticall(address governanceAddr, Call[] memory calls) internal { + // How the governance is implemented is out of scope here + vm.startBroadcast(governanceAddr); + + for (uint256 i = 0; i < calls.length; i++) { + Call memory call = calls[i]; + + (bool success, bytes memory data) = payable(call.target).call{value: call.value}(call.data); + require(success, "Multicall failed"); + } + + vm.stopBroadcast(); + } + + function mergeCalls(Call[] memory a, Call[] memory b) internal pure returns (Call[] memory result) { + result = new Call[](a.length + b.length); + for (uint256 i = 0; i < a.length; i++) { + result[i] = a[i]; + } + for (uint256 i = 0; i < b.length; i++) { + result[a.length + i] = b[i]; + } + } +} diff --git a/l1-contracts/test/foundry/l1/integration/_GatewayPreparationForTests.sol b/l1-contracts/test/foundry/l1/integration/_GatewayPreparationForTests.sol new file mode 100644 index 000000000..26e754d9f --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/_GatewayPreparationForTests.sol @@ -0,0 +1,49 @@ +import {stdToml} from "forge-std/StdToml.sol"; +import {Script, console2 as console} from "forge-std/Script.sol"; + +import {GatewayPreparation} from "deploy-scripts/GatewayPreparation.s.sol"; + +contract GatewayPreparationForTests is GatewayPreparation { + using stdToml for string; + + function initializeConfig() internal override { + // Grab config from output of l1 deployment + string memory root = vm.projectRoot(); + string memory path = string.concat(root, vm.envString("L1_OUTPUT")); + string memory toml = vm.readFile(path); + + config.bridgehub = toml.readAddress("$.deployed_addresses.bridgehub.bridgehub_proxy_addr"); + config.chainTypeManagerProxy = toml.readAddress( + "$.deployed_addresses.state_transition.state_transition_proxy_addr" + ); + config.sharedBridgeProxy = toml.readAddress("$.deployed_addresses.bridges.shared_bridge_proxy_addr"); + config.ctmDeploymentTracker = toml.readAddress( + 
"$.deployed_addresses.bridgehub.ctm_deployment_tracker_proxy_addr" + ); + config.governance = toml.readAddress("$.deployed_addresses.governance_addr"); + + path = string.concat(root, vm.envString("GATEWAY_AS_CHAIN_CONFIG")); + toml = vm.readFile(path); + + config.gatewayChainId = toml.readUint("$.chain.chain_chain_id"); + + path = string.concat(root, vm.envString("GATEWAY_AS_CHAIN_OUTPUT")); + toml = vm.readFile(path); + + config.gatewayChainAdmin = toml.readAddress("$.chain_admin_addr"); + config.gatewayChainProxyAdmin = toml.readAddress("$.chain_proxy_admin_addr"); + config.gatewayAccessControlRestriction = toml.readAddress( + "$.deployed_addresses.access_control_restriction_addr" + ); + config.l1NullifierProxy = toml.readAddress("$.deployed_addresses.bridges.l1_nullifier_proxy_addr"); + + console.log("chain chain id = ", config.gatewayChainId); + + // This value is never checked in the integration tests + config.gatewayDiamondCutData = hex""; + } + + function _getL1GasPrice() internal view override returns (uint256) { + return 10; + } +} diff --git a/l1-contracts/test/foundry/l1/integration/_SharedGatewayDeployer.t.sol b/l1-contracts/test/foundry/l1/integration/_SharedGatewayDeployer.t.sol new file mode 100644 index 000000000..bd72835af --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/_SharedGatewayDeployer.t.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {L1ContractDeployer} from "./_SharedL1ContractDeployer.t.sol"; +import {GatewayPreparationForTests} from "./_GatewayPreparationForTests.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import "@openzeppelin/contracts-v4/utils/Strings.sol"; + +contract GatewayDeployer is L1ContractDeployer { + GatewayPreparationForTests gatewayScript; + + function _initializeGatewayScript() internal { + vm.setEnv("L1_CONFIG", "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-l1.toml"); + vm.setEnv("L1_OUTPUT", 
"/test/foundry/l1/integration/deploy-scripts/script-out/output-deploy-l1.toml"); + vm.setEnv( + "ZK_CHAIN_CONFIG", + "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-zk-chain-10.toml" + ); + vm.setEnv( + "GATEWAY_AS_CHAIN_CONFIG", + "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-zk-chain-11.toml" + ); + vm.setEnv( + "GATEWAY_AS_CHAIN_OUTPUT", + "/test/foundry/l1/integration/deploy-scripts/script-out/output-deploy-zk-chain-11.toml" + ); + + gatewayScript = new GatewayPreparationForTests(); + gatewayScript.run(); + } +} diff --git a/l1-contracts/test/foundry/l1/integration/_SharedL1ContractDeployer.t.sol b/l1-contracts/test/foundry/l1/integration/_SharedL1ContractDeployer.t.sol new file mode 100644 index 000000000..e4cb7e690 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/_SharedL1ContractDeployer.t.sol @@ -0,0 +1,127 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {StdStorage, stdStorage} from "forge-std/Test.sol"; + +import {DeployL1Script} from "deploy-scripts/DeployL1.s.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {CTMDeploymentTracker} from "contracts/bridgehub/CTMDeploymentTracker.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {DeployedAddresses, Config} from "deploy-scripts/DeployUtils.s.sol"; + +contract L1ContractDeployer is Test { + using stdStorage for StdStorage; + + DeployL1Script l1Script; + DeployedAddresses public ecosystemAddresses; + Config public ecosystemConfig; + + address bridgehubProxyAddress; + address bridgehubOwnerAddress; + Bridgehub 
bridgehub; + + CTMDeploymentTracker ctmDeploymentTracker; + + L1AssetRouter public sharedBridge; + L1Nullifier public l1Nullifier; + L1NativeTokenVault public l1NativeTokenVault; + + IChainTypeManager chainTypeManager; + + function _deployL1Contracts() internal { + vm.setEnv("L1_CONFIG", "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-l1.toml"); + vm.setEnv("L1_OUTPUT", "/test/foundry/l1/integration/deploy-scripts/script-out/output-deploy-l1.toml"); + vm.setEnv( + "ZK_CHAIN_CONFIG", + "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-zk-chain-era.toml" + ); + vm.setEnv( + "ZK_CHAIN_OUT", + "/test/foundry/l1/integration/deploy-scripts/script-out/output-deploy-zk-chain-era.toml" + ); + vm.setEnv( + "GATEWAY_PREPARATION_L1_CONFIG", + "/test/foundry/l1/integration/deploy-scripts/script-config/gateway-preparation-l1.toml" + ); + + l1Script = new DeployL1Script(); + l1Script.runForTest(); + + ecosystemAddresses = l1Script.getAddresses(); + ecosystemConfig = l1Script.getConfig(); + + bridgehub = Bridgehub(ecosystemAddresses.bridgehub.bridgehubProxy); + chainTypeManager = IChainTypeManager(ecosystemAddresses.stateTransition.chainTypeManagerProxy); + ctmDeploymentTracker = CTMDeploymentTracker(ecosystemAddresses.bridgehub.ctmDeploymentTrackerProxy); + + sharedBridge = L1AssetRouter(ecosystemAddresses.bridges.sharedBridgeProxy); + l1Nullifier = L1Nullifier(ecosystemAddresses.bridges.l1NullifierProxy); + l1NativeTokenVault = L1NativeTokenVault(payable(ecosystemAddresses.vaults.l1NativeTokenVaultProxy)); + + _acceptOwnership(); + _setEraBatch(); + + bridgehubOwnerAddress = bridgehub.owner(); + } + + function _acceptOwnership() private { + vm.startPrank(bridgehub.pendingOwner()); + bridgehub.acceptOwnership(); + sharedBridge.acceptOwnership(); + ctmDeploymentTracker.acceptOwnership(); + vm.stopPrank(); + } + + function _setEraBatch() private { + vm.startPrank(sharedBridge.owner()); + // 
sharedBridge.setEraPostLegacyBridgeUpgradeFirstBatch(1); + // sharedBridge.setEraPostDiamondUpgradeFirstBatch(1); + vm.stopPrank(); + } + + function _registerNewToken(address _tokenAddress) internal { + bytes32 tokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, _tokenAddress); + if (!bridgehub.assetIdIsRegistered(tokenAssetId)) { + vm.prank(bridgehubOwnerAddress); + bridgehub.addTokenAssetId(tokenAssetId); + } + } + + function _registerNewTokens(address[] memory _tokens) internal { + for (uint256 i = 0; i < _tokens.length; i++) { + _registerNewToken(_tokens[i]); + } + } + + function _setSharedBridgeChainBalance(uint256 _chainId, address _token, uint256 _value) internal { + stdstore + .target(address(l1Nullifier)) + .sig(l1Nullifier.chainBalance.selector) + .with_key(_chainId) + .with_key(_token) + .checked_write(_value); + } + + function _setSharedBridgeIsWithdrawalFinalized( + uint256 _chainId, + uint256 _l2BatchNumber, + uint256 _l2ToL1MessageNumber, + bool _isFinalized + ) internal { + stdstore + .target(address(l1Nullifier)) + .sig(l1Nullifier.isWithdrawalFinalized.selector) + .with_key(_chainId) + .with_key(_l2BatchNumber) + .with_key(_l2ToL1MessageNumber) + .checked_write(_isFinalized); + } + + // add this to be excluded from coverage report + function test() internal virtual {} +} diff --git a/l1-contracts/test/foundry/l1/integration/_SharedL2TxMocker.t.sol b/l1-contracts/test/foundry/l1/integration/_SharedL2TxMocker.t.sol new file mode 100644 index 000000000..488811850 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/_SharedL2TxMocker.t.sol @@ -0,0 +1,81 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "contracts/common/Config.sol"; + +contract L2TxMocker is Test { + address mockRefundRecipient; + address 
mockL2Contract; + address mockL2SharedBridge; + + uint256 mockL2GasLimit = 10000000; + uint256 mockL2GasPerPubdataByteLimit = REQUIRED_L2_GAS_PRICE_PER_PUBDATA; + + bytes mockL2Calldata; + bytes[] mockFactoryDeps; + + mapping(uint256 chainId => address l2MockContract) public chainContracts; + + constructor() { + mockRefundRecipient = makeAddr("refundrecipient"); + mockL2Contract = makeAddr("mockl2contract"); + mockL2SharedBridge = makeAddr("mockl2sharedbridge"); + + mockL2Calldata = ""; + mockFactoryDeps = new bytes[](1); + mockFactoryDeps[0] = "11111111111111111111111111111111"; + } + + function _addL2ChainContract(uint256 _chainId, address _chainContract) internal { + chainContracts[_chainId] = _chainContract; + } + + function _createL2TransactionRequestDirect( + uint256 _chainId, + uint256 _mintValue, + uint256 _l2Value, + uint256 _l2GasLimit, + uint256 _l2GasPerPubdataByteLimit, + bytes memory _l2CallData + ) internal returns (L2TransactionRequestDirect memory request) { + request.chainId = _chainId; + request.mintValue = _mintValue; + request.l2Value = _l2Value; + request.l2GasLimit = _l2GasLimit; + request.l2GasPerPubdataByteLimit = _l2GasPerPubdataByteLimit; + request.l2Contract = chainContracts[_chainId]; + request.l2Calldata = _l2CallData; + + //mocked + request.factoryDeps = mockFactoryDeps; + request.refundRecipient = mockRefundRecipient; + } + + function _createL2TransactionRequestTwoBridges( + uint256 _chainId, + uint256 _mintValue, + uint256 _secondBridgeValue, + address _secondBridgeAddress, + uint256 _l2Value, + uint256 _l2GasLimit, + uint256 _l2GasPerPubdataByteLimit, + bytes memory _secondBridgeCalldata + ) internal returns (L2TransactionRequestTwoBridgesOuter memory request) { + request.chainId = _chainId; + request.mintValue = _mintValue; + request.secondBridgeAddress = _secondBridgeAddress; + request.secondBridgeValue = _secondBridgeValue; + request.l2Value = _l2Value; + request.l2GasLimit = _l2GasLimit; + request.l2GasPerPubdataByteLimit = 
_l2GasPerPubdataByteLimit; + request.secondBridgeCalldata = _secondBridgeCalldata; + + //mocks + request.refundRecipient = mockRefundRecipient; + } + + // add this to be excluded from coverage report + function testL2TxMocker() internal {} +} diff --git a/l1-contracts/test/foundry/l1/integration/_SharedTokenDeployer.t.sol b/l1-contracts/test/foundry/l1/integration/_SharedTokenDeployer.t.sol new file mode 100644 index 000000000..8696fd2a6 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/_SharedTokenDeployer.t.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {DeployErc20Script} from "deploy-scripts/DeployErc20.s.sol"; + +contract TokenDeployer is Test { + address[] tokens; + DeployErc20Script private deployScript; + + function _deployTokens() internal { + vm.setEnv( + "TOKENS_CONFIG", + "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-erc20.toml" + ); + + deployScript = new DeployErc20Script(); + deployScript.run(); + tokens = deployScript.getTokensAddresses(); + } + + // add this to be excluded from coverage report + function testTokenDeployer() internal {} +} diff --git a/l1-contracts/test/foundry/l1/integration/_SharedZKChainDeployer.t.sol b/l1-contracts/test/foundry/l1/integration/_SharedZKChainDeployer.t.sol new file mode 100644 index 000000000..8836fec99 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/_SharedZKChainDeployer.t.sol @@ -0,0 +1,193 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {StdStorage, stdStorage} from "forge-std/Test.sol"; + +import {L1ContractDeployer} from "./_SharedL1ContractDeployer.t.sol"; +import {RegisterZKChainScript} from "deploy-scripts/RegisterZKChain.s.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import "@openzeppelin/contracts-v4/utils/Strings.sol"; +import {IZKChain} from 
"contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; +import {IDiamondInit} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; + +import {Config as ChainConfig} from "deploy-scripts/RegisterZKChain.s.sol"; + +contract ZKChainDeployer is L1ContractDeployer { + using stdStorage for StdStorage; + + RegisterZKChainScript deployScript; + + struct ZKChainDescription { + uint256 zkChainChainId; + address baseToken; + uint256 bridgehubCreateNewChainSalt; + bool validiumMode; + address validatorSenderOperatorCommitEth; + address validatorSenderOperatorBlobsEth; + uint128 baseTokenGasPriceMultiplierNominator; + uint128 baseTokenGasPriceMultiplierDenominator; + } + + ChainConfig internal eraConfig; + + uint256 currentZKChainId = 10; + uint256 eraZKChainId = 9; + uint256[] public zkChainIds; + + function _deployEra() internal { + vm.setEnv( + "ZK_CHAIN_CONFIG", + "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-zk-chain-era.toml" + ); + vm.setEnv( + "ZK_CHAIN_OUT", + "/test/foundry/l1/integration/deploy-scripts/script-out/output-deploy-zk-chain-era.toml" + ); + deployScript = new RegisterZKChainScript(); + saveZKChainConfig(_getDefaultDescription(eraZKChainId, ETH_TOKEN_ADDRESS, eraZKChainId)); + vm.warp(100); + deployScript.runForTest(); + zkChainIds.push(eraZKChainId); + eraConfig = deployScript.getConfig(); + } + + function _deployZKChain(address _baseToken) internal { + vm.setEnv( + "ZK_CHAIN_CONFIG", + string.concat( + "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-zk-chain-", + Strings.toString(currentZKChainId), + ".toml" + ) + ); + vm.setEnv( + "ZK_CHAIN_OUT", + string.concat( + "/test/foundry/l1/integration/deploy-scripts/script-out/output-deploy-zk-chain-", + Strings.toString(currentZKChainId), + ".toml" + ) + ); + 
zkChainIds.push(currentZKChainId); + saveZKChainConfig(_getDefaultDescription(currentZKChainId, _baseToken, currentZKChainId)); + currentZKChainId++; + deployScript.runForTest(); + } + + function _getDefaultDescription( + uint256 __chainId, + address __baseToken, + uint256 __salt + ) internal returns (ZKChainDescription memory description) { + description = ZKChainDescription({ + zkChainChainId: __chainId, + baseToken: __baseToken, + bridgehubCreateNewChainSalt: __salt, + validiumMode: false, + validatorSenderOperatorCommitEth: address(0), + validatorSenderOperatorBlobsEth: address(1), + baseTokenGasPriceMultiplierNominator: uint128(1), + baseTokenGasPriceMultiplierDenominator: uint128(1) + }); + } + + function saveZKChainConfig(ZKChainDescription memory description) public { + string memory serialized; + + vm.serializeAddress("toml1", "owner_address", 0x70997970C51812dc3A010C7d01b50e0d17dc79C8); + vm.serializeUint("chain", "chain_chain_id", description.zkChainChainId); + vm.serializeAddress("chain", "base_token_addr", description.baseToken); + vm.serializeUint("chain", "bridgehub_create_new_chain_salt", description.bridgehubCreateNewChainSalt); + + uint256 validiumMode = 0; + + if (description.validiumMode) { + validiumMode = 1; + } + + vm.serializeUint("chain", "validium_mode", validiumMode); + vm.serializeAddress( + "chain", + "validator_sender_operator_commit_eth", + description.validatorSenderOperatorCommitEth + ); + vm.serializeAddress( + "chain", + "validator_sender_operator_blobs_eth", + description.validatorSenderOperatorBlobsEth + ); + vm.serializeUint( + "chain", + "base_token_gas_price_multiplier_nominator", + description.baseTokenGasPriceMultiplierNominator + ); + vm.serializeUint("chain", "governance_min_delay", 0); + vm.serializeAddress("chain", "governance_security_council_address", address(0)); + + string memory single_serialized = vm.serializeUint( + "chain", + "base_token_gas_price_multiplier_denominator", + 
description.baseTokenGasPriceMultiplierDenominator + ); + + string memory toml = vm.serializeString("toml1", "chain", single_serialized); + string memory path = string.concat(vm.projectRoot(), vm.envString("ZK_CHAIN_CONFIG")); + vm.writeToml(toml, path); + } + + function getZKChainAddress(uint256 _chainId) public view returns (address) { + return bridgehub.getZKChain(_chainId); + } + + function getZKChainBaseToken(uint256 _chainId) public view returns (address) { + return bridgehub.baseToken(_chainId); + } + + function acceptPendingAdmin() public { + IZKChain chain = IZKChain(bridgehub.getZKChain(currentZKChainId - 1)); + address admin = chain.getPendingAdmin(); + vm.startBroadcast(admin); + chain.acceptAdmin(); + vm.stopBroadcast(); + vm.deal(admin, 10000000000000000000000000); + } + + // add this to be excluded from coverage report + function testZKChainDeployer() internal {} + + function _deployZkChain( + uint256 _chainId, + bytes32 _baseTokenAssetId, + address _admin, + uint256 _protocolVersion, + bytes32 _storedBatchZero, + address _bridgehub + ) internal returns (address) { + Diamond.DiamondCutData memory diamondCut = abi.decode( + ecosystemConfig.contracts.diamondCutData, + (Diamond.DiamondCutData) + ); + bytes memory initData; + + { + initData = bytes.concat( + IDiamondInit.initialize.selector, + bytes32(_chainId), + bytes32(uint256(uint160(address(_bridgehub)))), + bytes32(uint256(uint160(address(this)))), + bytes32(_protocolVersion), + bytes32(uint256(uint160(_admin))), + bytes32(uint256(uint160(address(0x1337)))), + _baseTokenAssetId, + _storedBatchZero, + diamondCut.initCalldata + ); + } + diamondCut.initCalldata = initData; + DiamondProxy hyperchainContract = new DiamondProxy{salt: bytes32(0)}(block.chainid, diamondCut); + return address(hyperchainContract); + } +} diff --git a/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-erc20.toml 
b/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-erc20.toml new file mode 100644 index 000000000..dacf8865e --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-erc20.toml @@ -0,0 +1,15 @@ +additional_addresses_for_minting = [] + +[tokens.DAI] +name = "DAI" +symbol = "DAI" +decimals = 18 +implementation = "TestnetERC20Token.sol" +mint = "10000000000" + +[tokens.USDC] +name = "USDC" +symbol = "USDC" +decimals = 18 +implementation = "TestnetERC20Token.sol" +mint = "10000000000" diff --git a/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-l1.toml b/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-l1.toml new file mode 100644 index 000000000..d52a6b704 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-l1.toml @@ -0,0 +1,32 @@ +era_chain_id = 9 +owner_address = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" +testnet_verifier = true + +[contracts] +governance_security_council_address = "0x0000000000000000000000000000000000000000" +governance_min_delay = 0 +max_number_of_chains = 100 +create2_factory_salt = "0x00000000000000000000000000000000000000000000000000000000000000ff" +create2_factory_addr = "0x0000000000000000000000000000000000000000" +validator_timelock_execution_delay = 0 +genesis_root = "0x1000000000000000000000000000000000000000000000000000000000000000" +genesis_rollup_leaf_index = 1 +genesis_batch_commitment = "0x1000000000000000000000000000000000000000000000000000000000000000" +latest_protocol_version = 25 +recursion_node_level_vk_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +recursion_leaf_level_vk_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +recursion_circuits_set_vks_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +priority_tx_max_gas_limit = 80000000 
+diamond_init_pubdata_pricing_mode = 0 +diamond_init_batch_overhead_l1_gas = 1000000 +diamond_init_max_pubdata_per_batch = 120000 +diamond_init_max_l2_gas_per_batch = 80000000 +diamond_init_priority_tx_max_pubdata = 99000 +diamond_init_minimal_l2_gas_price = 250000000 +bootloader_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +default_aa_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +force_deployments_data = "0x00" +diamond_cut_data = "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000060000000000000000000000000103aa417efc38582cbb322d23b86342cb3bca4a40000000000000000000000000000000000000000000000000000000000000de00000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000003a000000000000000000000000000000000000000000000000000000000000009c00000000000000000000000000000000000000000000000000000000000000bc00000000000000000000000005d8d8112ce7c189c3df7e80fce3cd96863acbbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000014a37dc1d400000000000000000000000000000000000000000000000000000000a3bd011200000000000000000000000000000000000000000000000000000000a9f6d94100000000000000000000000000000000000000000000000000000000be6f11cf00000000000000000000000000000000000000000000000000000000e76db86500000000000000000000000000000000000000000000000000000000fc57565f000000000000000000000000000000000000000000000000000000006223258e0000000000000000000000000000000000000000000000000000000064b554ad0000000000000000000000000000000000000000000000000000000064bf8d660000000000000000000000000000000000000000000000000000000082b577490000000000000000000
00000000000000000000000000000000000008c564cc100000000000000000000000000000000000000000000000000000000235d9eb50000000000000000000000000000000000000000000000000000000027ae4c160000000000000000000000000000000000000000000000000000000043dc2951000000000000000000000000000000000000000000000000000000004623c91d000000000000000000000000000000000000000000000000000000004dd18bf5000000000000000000000000000000000000000000000000000000000e18b68100000000000000000000000000000000000000000000000000000000173389450000000000000000000000000000000000000000000000000000000021f603d7000000000000000000000000000000000000000000000000000000001cc5d103000000000000000000000000000000000000000000000000000000000000000000000000000000007684a3fd9f61c7c4d396177cc92384b076c6164a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000002c98acd7a6000000000000000000000000000000000000000000000000000000009cd939e4000000000000000000000000000000000000000000000000000000009d1b5a8100000000000000000000000000000000000000000000000000000000a1954fc500000000000000000000000000000000000000000000000000000000a7358efb00000000000000000000000000000000000000000000000000000000adfca15e00000000000000000000000000000000000000000000000000000000af6a2dcd00000000000000000000000000000000000000000000000000000000b22dd78e00000000000000000000000000000000000000000000000000000000b8c2f66f00000000000000000000000000000000000000000000000000000000bd7c541200000000000000000000000000000000000000000000000000000000c3bbd2d700000000000000000000000000000000000000000000000000000000cdffacc600000000000000000000000000000000000000000000000000000000d046815600000000000000000000000000000000000000000000000000000000d86970d800000000000000000000000000000000000000000000000000000000db1f0bf900000000000000000000000000000000000000000000000000000000e5355c7500000000000000000000000000000000000
000000000000000000000e81e0ba100000000000000000000000000000000000000000000000000000000ea6c029c00000000000000000000000000000000000000000000000000000000ef3f0bae00000000000000000000000000000000000000000000000000000000f5c1182c00000000000000000000000000000000000000000000000000000000facd743b00000000000000000000000000000000000000000000000000000000fd791f3c00000000000000000000000000000000000000000000000000000000fe26699e00000000000000000000000000000000000000000000000000000000631f4bac000000000000000000000000000000000000000000000000000000006e9960c30000000000000000000000000000000000000000000000000000000074f4d30d0000000000000000000000000000000000000000000000000000000079823c9a000000000000000000000000000000000000000000000000000000007a0ed627000000000000000000000000000000000000000000000000000000007b30c8da0000000000000000000000000000000000000000000000000000000039d7d4aa0000000000000000000000000000000000000000000000000000000046657fe90000000000000000000000000000000000000000000000000000000052ef6b2c000000000000000000000000000000000000000000000000000000005518c73b0000000000000000000000000000000000000000000000000000000056142d7a000000000000000000000000000000000000000000000000000000001de72e340000000000000000000000000000000000000000000000000000000029b98c670000000000000000000000000000000000000000000000000000000033ce93fe000000000000000000000000000000000000000000000000000000003408e470000000000000000000000000000000000000000000000000000000003591c1a000000000000000000000000000000000000000000000000000000000396073820000000000000000000000000000000000000000000000000000000006d49e5b00000000000000000000000000000000000000000000000000000000086a56f8000000000000000000000000000000000000000000000000000000000ec6b0b70000000000000000000000000000000000000000000000000000000018e3a94100000000000000000000000000000000000000000000000000000000000000000000000000000000a36f72a317fa180d945efb5fe4383c9fbfe8c8be000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000010000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000b7efda2ae00000000000000000000000000000000000000000000000000000000b473318e00000000000000000000000000000000000000000000000000000000d06b26e200000000000000000000000000000000000000000000000000000000dcabb98200000000000000000000000000000000000000000000000000000000e4948f4300000000000000000000000000000000000000000000000000000000eb67241900000000000000000000000000000000000000000000000000000000042901c70000000000000000000000000000000000000000000000000000000008522c300000000000000000000000000000000000000000000000000000000012f43dab000000000000000000000000000000000000000000000000000000006c0960f900000000000000000000000000000000000000000000000000000000263b7f8e00000000000000000000000000000000000000000000000000000000000000000000000000000000340e73f0092fd6faec522a3b8a6b4b0d7242dbff0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000008701f58c5000000000000000000000000000000000000000000000000000000007f61885c0000000000000000000000000000000000000000000000000000000097c09d3400000000000000000000000000000000000000000000000000000000bd6db49900000000000000000000000000000000000000000000000000000000c37533bb0000000000000000000000000000000000000000000000000000000000a22e22000000000000000000000000000000000000000000000000000000000f23da43000000000000000000000000000000000000000000000000000000006edd4f120000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000ec2597d47a5416270d475b67e2b6cc20f35f4f5000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004c4b400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f4240000000000000000000000000000000000000000000000000000000000001d4c00000000000000000000000000000000000000000000000000000000004c4b40000000000000000000000000000000000000000000000000000000000000182b8000000000000000000000000000000000000000000000000000000000ee6b280000000000000000000000000a4cb26d6933d2c3e76718d30de8547bcdf8dd241" + +[tokens] +token_weth_address = "0x0000000000000000000000000000000000000000" diff --git a/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/generate-force-deployments-data.toml b/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/generate-force-deployments-data.toml new file mode 100644 index 000000000..15a0d7d43 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-config/generate-force-deployments-data.toml @@ -0,0 +1,7 @@ +era_chain_id = 9 +chain_id = 270 +l1_shared_bridge = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" +governance = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" +l2_legacy_shared_bridge = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" +l2_token_beacon = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" +l2_contracts_deployed_already = false diff --git a/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-out/.gitkeep b/l1-contracts/test/foundry/l1/integration/deploy-scripts/script-out/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2Erc20L1Test.t.sol b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2Erc20L1Test.t.sol new file mode 100644 index 000000000..b6346a09d --- /dev/null +++ 
b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2Erc20L1Test.t.sol @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {BridgehubMintCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {IL2AssetRouter} from "contracts/bridge/asset-router/IL2AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +import {SharedL2ContractDeployer} from "./_SharedL2ContractDeployer.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {SystemContractsArgs} from "./_SharedL2ContractL1DeployerUtils.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; +import {L2Erc20TestAbstract} from "./L2Erc20TestAbstract.t.sol"; +import {SharedL2ContractL1DeployerUtils} from 
"./_SharedL2ContractL1DeployerUtils.sol"; + +contract L2Erc20L1Test is Test, SharedL2ContractL1DeployerUtils, SharedL2ContractDeployer, L2Erc20TestAbstract { + function test() internal virtual override(DeployUtils, SharedL2ContractL1DeployerUtils) {} + + function initSystemContracts( + SystemContractsArgs memory _args + ) internal virtual override(SharedL2ContractDeployer, SharedL2ContractL1DeployerUtils) { + super.initSystemContracts(_args); + } + + function deployL2Contracts( + uint256 _l1ChainId + ) public virtual override(SharedL2ContractDeployer, SharedL2ContractL1DeployerUtils) { + super.deployL2Contracts(_l1ChainId); + } +} diff --git a/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2Erc20TestAbstract.t.sol b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2Erc20TestAbstract.t.sol new file mode 100644 index 000000000..fcabf0d01 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2Erc20TestAbstract.t.sol @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {BridgehubMintCTMAssetData} from 
"contracts/bridgehub/IBridgehub.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {IL2AssetRouter} from "contracts/bridge/asset-router/IL2AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +import {SharedL2ContractDeployer} from "./_SharedL2ContractDeployer.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {SystemContractsArgs} from "./_SharedL2ContractL1DeployerUtils.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; + +abstract contract L2Erc20TestAbstract is Test, SharedL2ContractDeployer { + function performDeposit(address depositor, address receiver, uint256 amount) internal { + vm.prank(aliasedL1AssetRouter); + L2AssetRouter(L2_ASSET_ROUTER_ADDR).finalizeDeposit({ + _l1Sender: depositor, + _l2Receiver: receiver, + _l1Token: L1_TOKEN_ADDRESS, + _amount: amount, + _data: encodeTokenData(TOKEN_DEFAULT_NAME, TOKEN_DEFAULT_SYMBOL, TOKEN_DEFAULT_DECIMALS) + }); + } + + function initializeTokenByDeposit() internal returns (address l2TokenAddress) { + performDeposit(makeAddr("someDepositor"), makeAddr("someReeiver"), 1); + + l2TokenAddress = IL2NativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).l2TokenAddress(L1_TOKEN_ADDRESS); + require(l2TokenAddress != address(0), "Token not initialized"); + } + + function test_shouldFinalizeERC20Deposit() public { + address depositor = makeAddr("depositor"); + address receiver = makeAddr("receiver"); + + performDeposit(depositor, receiver, 100); + + address l2TokenAddress = IL2NativeTokenVault(L2_NATIVE_TOKEN_VAULT_ADDR).l2TokenAddress(L1_TOKEN_ADDRESS); + + assertEq(BridgedStandardERC20(l2TokenAddress).balanceOf(receiver), 100); + 
assertEq(BridgedStandardERC20(l2TokenAddress).totalSupply(), 100); + assertEq(BridgedStandardERC20(l2TokenAddress).name(), TOKEN_DEFAULT_NAME); + assertEq(BridgedStandardERC20(l2TokenAddress).symbol(), TOKEN_DEFAULT_SYMBOL); + assertEq(BridgedStandardERC20(l2TokenAddress).decimals(), TOKEN_DEFAULT_DECIMALS); + } + + function test_governanceShouldBeAbleToReinitializeToken() public { + address l2TokenAddress = initializeTokenByDeposit(); + + BridgedStandardERC20.ERC20Getters memory getters = BridgedStandardERC20.ERC20Getters({ + ignoreName: false, + ignoreSymbol: false, + ignoreDecimals: false + }); + + vm.prank(ownerWallet); + BridgedStandardERC20(l2TokenAddress).reinitializeToken(getters, "TestTokenNewName", "TTN", 2); + assertEq(BridgedStandardERC20(l2TokenAddress).name(), "TestTokenNewName"); + assertEq(BridgedStandardERC20(l2TokenAddress).symbol(), "TTN"); + // The decimals should stay the same + assertEq(BridgedStandardERC20(l2TokenAddress).decimals(), 18); + } + + function test_governanceShouldNotBeAbleToSkipInitializerVersions() public { + address l2TokenAddress = initializeTokenByDeposit(); + + BridgedStandardERC20.ERC20Getters memory getters = BridgedStandardERC20.ERC20Getters({ + ignoreName: false, + ignoreSymbol: false, + ignoreDecimals: false + }); + + vm.expectRevert(); + vm.prank(ownerWallet); + BridgedStandardERC20(l2TokenAddress).reinitializeToken(getters, "TestTokenNewName", "TTN", 20); + } +} diff --git a/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2GatewayL1Test.t.sol b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2GatewayL1Test.t.sol new file mode 100644 index 000000000..3e8b04e42 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2GatewayL1Test.t.sol @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import 
{BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {BridgehubMintCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {IL2AssetRouter} from "contracts/bridge/asset-router/IL2AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +import {SharedL2ContractDeployer} from "./_SharedL2ContractDeployer.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {SystemContractsArgs} from "./_SharedL2ContractL1DeployerUtils.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; +import {L2GatewayTestAbstract} from "./L2GatewayTestAbstract.t.sol"; +import {SharedL2ContractL1DeployerUtils} from "./_SharedL2ContractL1DeployerUtils.sol"; + +contract L2GatewayL1Test is Test, SharedL2ContractL1DeployerUtils, SharedL2ContractDeployer, L2GatewayTestAbstract { + function test() internal virtual override(DeployUtils, SharedL2ContractL1DeployerUtils) {} + + function initSystemContracts( + 
SystemContractsArgs memory _args + ) internal virtual override(SharedL2ContractDeployer, SharedL2ContractL1DeployerUtils) { + super.initSystemContracts(_args); + } + + function deployL2Contracts( + uint256 _l1ChainId + ) public virtual override(SharedL2ContractDeployer, SharedL2ContractL1DeployerUtils) { + super.deployL2Contracts(_l1ChainId); + } +} diff --git a/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2GatewayTestAbstract.t.sol b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2GatewayTestAbstract.t.sol new file mode 100644 index 000000000..e97c2017b --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2GatewayTestAbstract.t.sol @@ -0,0 +1,85 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR, L2_MESSENGER} from "contracts/common/L2ContractAddresses.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {BridgehubMintCTMAssetData, BridgehubBurnCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {IL2AssetRouter} from "contracts/bridge/asset-router/IL2AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; 
+import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +import {SharedL2ContractDeployer} from "./_SharedL2ContractDeployer.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {SystemContractsArgs} from "./_SharedL2ContractL1DeployerUtils.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; + +abstract contract L2GatewayTestAbstract is Test, SharedL2ContractDeployer { + function test_gatewayShouldFinalizeDeposit() public { + finalizeDeposit(); + require(l2Bridgehub.ctmAssetIdFromAddress(address(chainTypeManager)) == ctmAssetId, "ctmAssetId mismatch"); + require(l2Bridgehub.ctmAssetIdFromChainId(mintChainId) == ctmAssetId, "ctmAssetIdFromChainId mismatch"); + } + + function test_forwardToL3OnGateway() public { + // todo fix this test + finalizeDeposit(); + vm.prank(SETTLEMENT_LAYER_RELAY_SENDER); + l2Bridgehub.forwardTransactionOnGateway(mintChainId, bytes32(0), 0); + } + + function test_withdrawFromGateway() public { + // todo fix this test + finalizeDeposit(); + address newAdmin = address(0x1); + bytes memory newDiamondCut = abi.encode(); + BridgehubBurnCTMAssetData memory data = BridgehubBurnCTMAssetData({ + chainId: mintChainId, + ctmData: abi.encode(newAdmin, config.contracts.diamondCutData), + chainData: abi.encode(chainTypeManager.protocolVersion()) + }); + vm.prank(ownerWallet); + vm.mockCall( + address(L2_MESSENGER), + abi.encodeWithSelector(L2_MESSENGER.sendToL1.selector), + abi.encode(bytes("")) + ); + l2AssetRouter.withdraw(ctmAssetId, abi.encode(data)); + } + + function finalizeDeposit() public { + bytes memory chainData = exampleChainCommitment; + bytes memory ctmData = abi.encode( + baseTokenAssetId, + ownerWallet, + chainTypeManager.protocolVersion(), + config.contracts.diamondCutData + ); + 
BridgehubMintCTMAssetData memory data = BridgehubMintCTMAssetData({ + chainId: mintChainId, + baseTokenAssetId: baseTokenAssetId, + ctmData: ctmData, + chainData: chainData + }); + vm.prank(aliasedL1AssetRouter); + l2AssetRouter.finalizeDeposit(L1_CHAIN_ID, ctmAssetId, abi.encode(data)); + } +} diff --git a/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2WethTestAbstract.t.sol b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2WethTestAbstract.t.sol new file mode 100644 index 000000000..d2a43be2e --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/L2WethTestAbstract.t.sol @@ -0,0 +1,123 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {BridgehubMintCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {IL2AssetRouter} from "contracts/bridge/asset-router/IL2AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} 
from "contracts/bridgehub/IBridgehub.sol"; + +import {SharedL2ContractDeployer} from "./_SharedL2ContractDeployer.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {SystemContractsArgs} from "./_SharedL2ContractL1DeployerUtils.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; +import {Unauthorized, UnimplementedMessage, BridgeMintNotImplemented} from "contracts/common/L1ContractErrors.sol"; + +abstract contract L2WethTestAbstract is Test, SharedL2ContractDeployer { + function test_shouldDepositWethByCallingDeposit() public { + uint256 amount = 100; + weth.deposit{value: amount}(); + assertEq(weth.balanceOf(address(this)), amount); + } + + function test_shouldDepositWethBySendingEth() public { + uint256 amount = 100; + address(weth).call{value: amount}(""); + assertEq(weth.balanceOf(address(this)), amount); + } + + function test_revertWhenDepositingWithRandomCalldata() public { + (bool success, ) = address(weth).call{value: 100}(hex"00000000"); + assertEq(success, false); + } + + function test_shouldWithdrawWethToL2Eth() public { + address sender = makeAddr("sender"); + uint256 amount = 100; + + vm.deal(sender, amount); + + vm.prank(sender); + weth.deposit{value: amount}(); + + vm.prank(sender); + weth.withdraw(amount); + + assertEq(weth.balanceOf(sender), 0); + assertEq(address(sender).balance, amount); + } + + function test_shouldDepositWethToAnotherAccount() public { + address sender = makeAddr("sender"); + address receiver = makeAddr("receiver"); + + uint256 amount = 100; + + vm.deal(sender, amount); + + vm.prank(sender); + weth.depositTo{value: amount}(receiver); + + assertEq(weth.balanceOf(receiver), amount); + assertEq(weth.balanceOf(sender), 0); + } + + function test_shouldWithdrawWethToAnotherAccount() public { + address sender = makeAddr("sender"); + address receiver = makeAddr("receiver"); + + uint256 
amount = 100; + + vm.deal(sender, amount); + + vm.prank(sender); + weth.deposit{value: amount}(); + + vm.prank(sender); + weth.withdrawTo(receiver, amount); + + assertEq(receiver.balance, amount); + assertEq(sender.balance, 0); + } + + function test_revertWhenWithdrawingMoreThanBalance() public { + vm.expectRevert(); + weth.withdraw(1); + } + + function test_revertWhenCallingBridgeMint() public { + vm.expectRevert(abi.encodeWithSelector(BridgeMintNotImplemented.selector)); + vm.prank(L2_ASSET_ROUTER_ADDR); + weth.bridgeMint(address(1), 1); + } + + function test_revertWhenCallingBridgeMintDirectly() public { + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); + weth.bridgeMint(address(1), 1); + } + + function test_revertWhenCallingBridgeBurnDirectly() public { + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); + weth.bridgeBurn(address(1), 1); + } +} diff --git a/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/_SharedL2ContractDeployer.sol b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/_SharedL2ContractDeployer.sol new file mode 100644 index 000000000..83d8def98 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/_SharedL2ContractDeployer.sol @@ -0,0 +1,202 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; + +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from 
"@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {BridgehubMintCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {IL2AssetRouter} from "contracts/bridge/asset-router/IL2AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2WrappedBaseToken} from "contracts/bridge/L2WrappedBaseToken.sol"; +import {L2SharedBridgeLegacy} from "contracts/bridge/L2SharedBridgeLegacy.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; + +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {SystemContractsArgs} from "./_SharedL2ContractL1DeployerUtils.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; + +abstract contract SharedL2ContractDeployer is Test, DeployUtils { + L2WrappedBaseToken internal weth; + address internal l1WethAddress = address(4); + + // The owner of the beacon and the native token vault + address internal ownerWallet = address(2); + + BridgedStandardERC20 internal standardErc20Impl; + + UpgradeableBeacon internal beacon; + BeaconProxy internal proxy; + + IL2AssetRouter l2AssetRouter = IL2AssetRouter(L2_ASSET_ROUTER_ADDR); + IBridgehub l2Bridgehub = IBridgehub(L2_BRIDGEHUB_ADDR); + + uint256 internal constant L1_CHAIN_ID = 10; // it cannot be 9, the default block.chainid + uint256 internal 
ERA_CHAIN_ID = 270; + uint256 internal mintChainId = 300; + address internal l1AssetRouter = makeAddr("l1AssetRouter"); + address internal aliasedL1AssetRouter = AddressAliasHelper.applyL1ToL2Alias(l1AssetRouter); + + // We won't actually deploy an L1 token in these tests, but we need some address for it. + address internal L1_TOKEN_ADDRESS = 0x1111100000000000000000000000000000011111; + + string internal constant TOKEN_DEFAULT_NAME = "TestnetERC20Token"; + string internal constant TOKEN_DEFAULT_SYMBOL = "TET"; + uint8 internal constant TOKEN_DEFAULT_DECIMALS = 18; + address internal l1CTMDeployer = makeAddr("l1CTMDeployer"); + address internal l1CTM = makeAddr("l1CTM"); + bytes32 internal ctmAssetId = keccak256(abi.encode(L1_CHAIN_ID, l1CTMDeployer, bytes32(uint256(uint160(l1CTM))))); + + bytes32 internal baseTokenAssetId = + keccak256(abi.encode(L1_CHAIN_ID, L2_NATIVE_TOKEN_VAULT_ADDR, abi.encode(ETH_TOKEN_ADDRESS))); + + bytes internal exampleChainCommitment; + + IChainTypeManager internal chainTypeManager; + + function setUp() public { + standardErc20Impl = new BridgedStandardERC20(); + beacon = new UpgradeableBeacon(address(standardErc20Impl)); + beacon.transferOwnership(ownerWallet); + + // One of the purposes of deploying it here is to publish its bytecode + BeaconProxy beaconProxy = new BeaconProxy(address(beacon), new bytes(0)); + proxy = beaconProxy; + bytes32 beaconProxyBytecodeHash; + assembly { + beaconProxyBytecodeHash := extcodehash(beaconProxy) + } + + address l2SharedBridge = deployL2SharedBridgeLegacy( + L1_CHAIN_ID, + ERA_CHAIN_ID, + ownerWallet, + l1AssetRouter, + beaconProxyBytecodeHash + ); + + L2WrappedBaseToken weth = deployL2Weth(); + + initSystemContracts( + SystemContractsArgs({ + l1ChainId: L1_CHAIN_ID, + eraChainId: ERA_CHAIN_ID, + l1AssetRouter: l1AssetRouter, + legacySharedBridge: l2SharedBridge, + l2TokenBeacon: address(beacon), + l2TokenProxyBytecodeHash: beaconProxyBytecodeHash, + aliasedOwner: ownerWallet, + 
contractsDeployedAlready: false, + l1CtmDeployer: l1CTMDeployer + }) + ); + deployL2Contracts(L1_CHAIN_ID); + + vm.prank(aliasedL1AssetRouter); + l2AssetRouter.setAssetHandlerAddress(L1_CHAIN_ID, ctmAssetId, L2_BRIDGEHUB_ADDR); + vm.prank(ownerWallet); + l2Bridgehub.addChainTypeManager(address(addresses.stateTransition.chainTypeManagerProxy)); + vm.prank(AddressAliasHelper.applyL1ToL2Alias(l1CTMDeployer)); + l2Bridgehub.setAssetHandlerAddress( + bytes32(uint256(uint160(l1CTM))), + address(addresses.stateTransition.chainTypeManagerProxy) + ); + chainTypeManager = IChainTypeManager(address(addresses.stateTransition.chainTypeManagerProxy)); + getExampleChainCommitment(); + } + + function getExampleChainCommitment() internal returns (bytes memory) { + vm.mockCall( + L2_ASSET_ROUTER_ADDR, + abi.encodeWithSelector(IL1AssetRouter.L1_NULLIFIER.selector), + abi.encode(L2_ASSET_ROUTER_ADDR) + ); + vm.mockCall( + L2_ASSET_ROUTER_ADDR, + abi.encodeWithSelector(IL1Nullifier.l2BridgeAddress.selector), + abi.encode(address(0)) + ); + vm.prank(L2_BRIDGEHUB_ADDR); + address chainAddress = chainTypeManager.createNewChain( + ERA_CHAIN_ID + 1, + baseTokenAssetId, + address(0x1), + abi.encode(config.contracts.diamondCutData, generatedData.forceDeploymentsData), + new bytes[](0) + ); + exampleChainCommitment = abi.encode(IZKChain(chainAddress).prepareChainCommitment()); + } + + /// @notice Encodes the token data. + /// @param name The name of the token. + /// @param symbol The symbol of the token. + /// @param decimals The decimals of the token. 
+ function encodeTokenData( + string memory name, + string memory symbol, + uint8 decimals + ) internal pure returns (bytes memory) { + bytes memory encodedName = abi.encode(name); + bytes memory encodedSymbol = abi.encode(symbol); + bytes memory encodedDecimals = abi.encode(decimals); + + return abi.encode(encodedName, encodedSymbol, encodedDecimals); + } + + function deployL2SharedBridgeLegacy( + uint256 _l1ChainId, + uint256 _eraChainId, + address _aliasedOwner, + address _l1SharedBridge, + bytes32 _l2TokenProxyBytecodeHash + ) internal returns (address) { + bytes32 ethAssetId = DataEncoding.encodeNTVAssetId(_l1ChainId, ETH_TOKEN_ADDRESS); + + L2SharedBridgeLegacy bridge = new L2SharedBridgeLegacy(); + console.log("bridge", address(bridge)); + address proxyAdmin = address(0x1); + TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy( + address(bridge), + proxyAdmin, + abi.encodeWithSelector( + L2SharedBridgeLegacy.initialize.selector, + _l1SharedBridge, + _l2TokenProxyBytecodeHash, + _aliasedOwner + ) + ); + console.log("proxy", address(proxy)); + return address(proxy); + } + + function deployL2Weth() internal returns (L2WrappedBaseToken) { + L2WrappedBaseToken wethImpl = new L2WrappedBaseToken(); + TransparentUpgradeableProxy wethProxy = new TransparentUpgradeableProxy(address(wethImpl), ownerWallet, ""); + weth = L2WrappedBaseToken(payable(wethProxy)); + weth.initializeV2("Wrapped Ether", "WETH", L2_ASSET_ROUTER_ADDR, l1WethAddress, baseTokenAssetId); + return weth; + } + + function initSystemContracts(SystemContractsArgs memory _args) internal virtual; + function deployL2Contracts(uint256 _l1ChainId) public virtual; +} diff --git a/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/_SharedL2ContractL1DeployerUtils.sol b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/_SharedL2ContractL1DeployerUtils.sol new file mode 100644 index 000000000..9dd125055 --- /dev/null +++ 
b/l1-contracts/test/foundry/l1/integration/l2-tests-in-l1-context/_SharedL2ContractL1DeployerUtils.sol @@ -0,0 +1,129 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {StdStorage, stdStorage, stdToml} from "forge-std/Test.sol"; +import {Script, console2 as console} from "forge-std/Script.sol"; + +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {CTMDeploymentTracker} from "contracts/bridgehub/CTMDeploymentTracker.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {DeployedAddresses, Config} from "deploy-scripts/DeployUtils.s.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; + +import {L2_MESSAGE_ROOT_ADDR, L2_BRIDGEHUB_ADDR, L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR} from "contracts/common/L2ContractAddresses.sol"; + +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {L2NativeTokenVault} from "contracts/bridge/ntv/L2NativeTokenVault.sol"; +import {L2NativeTokenVaultDev} from "contracts/dev-contracts/test/L2NativeTokenVaultDev.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; + +struct SystemContractsArgs { + uint256 l1ChainId; + uint256 eraChainId; + address l1AssetRouter; + address legacySharedBridge; + address l2TokenBeacon; + bytes32 l2TokenProxyBytecodeHash; + address 
aliasedOwner; + bool contractsDeployedAlready; + address l1CtmDeployer; +} + +contract SharedL2ContractL1DeployerUtils is DeployUtils { + using stdToml for string; + using stdStorage for StdStorage; + + /// @dev We provide a fast form of debugging the L2 contracts using L1 foundry. We also test using zk foundry. + function initSystemContracts(SystemContractsArgs memory _args) internal virtual { + bytes32 baseTokenAssetId = DataEncoding.encodeNTVAssetId(_args.l1ChainId, ETH_TOKEN_ADDRESS); + address wethToken = address(0x1); + // we deploy the code to get the contract code with immutables which we then vm.etch + address messageRoot = address(new MessageRoot(IBridgehub(L2_BRIDGEHUB_ADDR))); + address bridgehub = address(new Bridgehub(_args.l1ChainId, _args.aliasedOwner, 100)); + address assetRouter = address( + new L2AssetRouter( + _args.l1ChainId, + _args.eraChainId, + _args.l1AssetRouter, + _args.legacySharedBridge, + baseTokenAssetId, + _args.aliasedOwner + ) + ); + address ntv = address( + new L2NativeTokenVaultDev( + _args.l1ChainId, + _args.aliasedOwner, + _args.l2TokenProxyBytecodeHash, + _args.legacySharedBridge, + _args.l2TokenBeacon, + _args.contractsDeployedAlready, + wethToken, + baseTokenAssetId + ) + ); + + vm.etch(L2_MESSAGE_ROOT_ADDR, messageRoot.code); + MessageRoot(L2_MESSAGE_ROOT_ADDR).initialize(); + + vm.etch(L2_BRIDGEHUB_ADDR, bridgehub.code); + uint256 prevChainId = block.chainid; + vm.chainId(_args.l1ChainId); + Bridgehub(L2_BRIDGEHUB_ADDR).initialize(_args.aliasedOwner); + vm.chainId(prevChainId); + vm.prank(_args.aliasedOwner); + Bridgehub(L2_BRIDGEHUB_ADDR).setAddresses( + L2_ASSET_ROUTER_ADDR, + ICTMDeploymentTracker(_args.l1CtmDeployer), + IMessageRoot(L2_MESSAGE_ROOT_ADDR) + ); + + vm.etch(L2_ASSET_ROUTER_ADDR, assetRouter.code); + stdstore.target(address(L2_ASSET_ROUTER_ADDR)).sig("l1AssetRouter()").checked_write(_args.l1AssetRouter); + + stdstore + .target(L2_ASSET_ROUTER_ADDR) + .sig("assetHandlerAddress(bytes32)") + 
.with_key(baseTokenAssetId) + .checked_write(bytes32(uint256(uint160(L2_NATIVE_TOKEN_VAULT_ADDR)))); + + vm.etch(L2_NATIVE_TOKEN_VAULT_ADDR, ntv.code); + + vm.store(L2_NATIVE_TOKEN_VAULT_ADDR, bytes32(uint256(251)), bytes32(uint256(_args.l2TokenProxyBytecodeHash))); + L2NativeTokenVaultDev(L2_NATIVE_TOKEN_VAULT_ADDR).deployBridgedStandardERC20(_args.aliasedOwner); + } + + function deployL2Contracts(uint256 _l1ChainId) public virtual { + string memory root = vm.projectRoot(); + string memory inputPath = string.concat( + root, + "/test/foundry/l1/integration/deploy-scripts/script-config/config-deploy-l1.toml" + ); + initializeConfig(inputPath); + addresses.transparentProxyAdmin = address(0x1); + addresses.bridgehub.bridgehubProxy = L2_BRIDGEHUB_ADDR; + addresses.bridges.sharedBridgeProxy = L2_ASSET_ROUTER_ADDR; + addresses.vaults.l1NativeTokenVaultProxy = L2_NATIVE_TOKEN_VAULT_ADDR; + addresses.blobVersionedHashRetriever = address(0x1); + config.l1ChainId = _l1ChainId; + console.log("Deploying L2 contracts"); + instantiateCreate2Factory(); + deployGenesisUpgrade(); + deployVerifier(); + deployValidatorTimelock(); + deployChainTypeManagerContract(); + } + + // add this to be excluded from coverage report + function test() internal virtual override {} +} diff --git a/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-config/mainnet-era.toml b/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-config/mainnet-era.toml new file mode 100644 index 000000000..d48013384 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-config/mainnet-era.toml @@ -0,0 +1,7 @@ +owner_address = "0x4e4943346848c4867f81dfb37c4ca9c5715a7828" + +[chain] +chain_id = 324 +diamond_proxy_address = "0x32400084c286cf3e17e7b677ea9583e60a000324" +validium_mode = false +permanent_rollup = true diff --git a/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-config/mainnet.toml 
b/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-config/mainnet.toml new file mode 100644 index 000000000..abf681e37 --- /dev/null +++ b/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-config/mainnet.toml @@ -0,0 +1,37 @@ +era_chain_id = 324 +owner_address = "8f7a9912416e8adc4d9c21fae1415d3318a11897" +testnet_verifier = false + +[contracts] +max_number_of_chains = 100 +create2_factory_salt = "0xde6b9c610417de5c775c1601c947f482e4f4e30c0f7b848c6d2b0554d76f607e" +validator_timelock_execution_delay = 0 +genesis_root = "0xf9030b78c5bf5ac997a76962aa32c90a6d8e8ebce9838c8eeb388d73e1f7659a" +genesis_rollup_leaf_index = 64 +genesis_batch_commitment = "0x34c1b220363e0cde7eaf10fe95754d61de097e0f9d9a1dc56c8026562e395259" +latest_protocol_version = "0x1900000000" +recursion_node_level_vk_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +recursion_leaf_level_vk_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +recursion_circuits_set_vks_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +priority_tx_max_gas_limit = 72000000 +diamond_init_pubdata_pricing_mode = 0 +diamond_init_batch_overhead_l1_gas = 1000000 +diamond_init_max_pubdata_per_batch = 120000 +diamond_init_max_l2_gas_per_batch = 80000000 +diamond_init_priority_tx_max_pubdata = 99000 +diamond_init_minimal_l2_gas_price = 250000000 +bootloader_hash = "0x010008c753336bc8d1ddca235602b9f31d346412b2d463cd342899f7bfb73baf" +default_aa_hash = "0x0100055d760f11a3d737e7fd1816e600a4cd874a9f17f7a225d1f1c537c51a1e" +bridgehub_proxy_address = "0x303a465B659cBB0ab36eE643eA362c509EEb5213" +old_shared_bridge_proxy_address = "0xD7f9f54194C633F36CCD5F3da84ad4a1c38cB2cB" +state_transition_manager_address = "0xc2eE6b6af7d616f6e27ce7F4A451Aedc2b0F5f5C" +transparent_proxy_admin = "0xC2a36181fB524a6bEfE639aFEd37A67e77d62cf1" +era_diamond_proxy = "0x32400084c286cf3e17e7b677ea9583e60a000324" +blob_versioned_hash_retriever = 
"0x0000000000000000000000000000000000000001" +legacy_erc20_bridge_address = "0x57891966931eb4bb6fb81430e6ce0a03aabde063" +old_validator_timelock = "0x5D8ba173Dc6C3c90C8f7C04C9288BeF5FDbAd06E" +l2_bridge_proxy_owner_address = "0x0000000000000000000000000000000000000001" +l2_bridged_standard_erc20_proxy_owner_address = "0x0000000000000000000000000000000000000001" + +[tokens] +token_weth_address = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2" diff --git a/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-out/.gitkeep b/l1-contracts/test/foundry/l1/integration/upgrade-envs/script-out/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/l1-contracts/test/foundry/unit/concrete/AddressAliasHelper/_AddressAliasHelper_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/AddressAliasHelper/_AddressAliasHelper_Shared.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/AddressAliasHelper/_AddressAliasHelper_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/AddressAliasHelper/_AddressAliasHelper_Shared.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/AddressAliasHelper/applyL1ToL2Alias.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/AddressAliasHelper/applyL1ToL2Alias.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/AddressAliasHelper/applyL1ToL2Alias.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/AddressAliasHelper/applyL1ToL2Alias.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/AddressAliasHelper/undoL1ToL2Alias.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/AddressAliasHelper/undoL1ToL2Alias.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/AddressAliasHelper/undoL1ToL2Alias.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/AddressAliasHelper/undoL1ToL2Alias.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/Bridgehub/Initialize.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/Initialize.t.sol similarity index 96% rename from l1-contracts/test/foundry/unit/concrete/Bridgehub/Initialize.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/Initialize.t.sol index 38bba5ec2..a31095dea 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridgehub/Initialize.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/Initialize.t.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8.17; +pragma solidity ^0.8.21; import {BridgehubTest} from "./_Bridgehub_Shared.t.sol"; diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/MessageRoot.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/MessageRoot.t.sol new file mode 100644 index 000000000..34ca5f9d2 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/MessageRoot.t.sol @@ -0,0 +1,117 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {Merkle} from "contracts/common/libraries/Merkle.sol"; +import {MessageHashing} from "contracts/common/libraries/MessageHashing.sol"; + +// Chain tree consists of batch commitments as their leaves. We use hash of "new bytes(96)" as the hash of an empty leaf. +bytes32 constant CHAIN_TREE_EMPTY_ENTRY_HASH = bytes32( + 0x46700b4d40ac5c35af2c22dda2787a91eb567b06c924a8fb8ae9a05b20c08c21 +); + +// Chain tree consists of batch commitments as their leaves. We use hash of "new bytes(96)" as the hash of an empty leaf. 
+bytes32 constant SHARED_ROOT_TREE_EMPTY_HASH = bytes32( + 0x46700b4d40ac5c35af2c22dda2787a91eb567b06c924a8fb8ae9a05b20c08c21 +); + +contract MessageRootTest is Test { + address bridgeHub; + MessageRoot messageRoot; + + function setUp() public { + bridgeHub = makeAddr("bridgeHub"); + messageRoot = new MessageRoot(IBridgehub(bridgeHub)); + } + + function test_init() public { + assertEq(messageRoot.getAggregatedRoot(), (MessageHashing.chainIdLeafHash(0x00, block.chainid))); + } + + function test_RevertWhen_addChainNotBridgeHub() public { + uint256 alphaChainId = uint256(uint160(makeAddr("alphaChainId"))); + uint256 betaChainId = uint256(uint160(makeAddr("betaChainId"))); + + assertFalse(messageRoot.chainRegistered(alphaChainId), "alpha chain 1"); + + vm.expectRevert("MR: only bridgehub"); + messageRoot.addNewChain(alphaChainId); + + assertFalse(messageRoot.chainRegistered(alphaChainId), "alpha chain 2"); + } + + function test_addNewChain() public { + uint256 alphaChainId = uint256(uint160(makeAddr("alphaChainId"))); + uint256 betaChainId = uint256(uint160(makeAddr("betaChainId"))); + + assertFalse(messageRoot.chainRegistered(alphaChainId), "alpha chain 1"); + assertFalse(messageRoot.chainRegistered(betaChainId), "beta chain 1"); + + vm.prank(bridgeHub); + vm.expectEmit(true, false, false, false); + emit MessageRoot.AddedChain(alphaChainId, 0); + messageRoot.addNewChain(alphaChainId); + + assertTrue(messageRoot.chainRegistered(alphaChainId), "alpha chain 2"); + assertFalse(messageRoot.chainRegistered(betaChainId), "beta chain 2"); + + assertEq(messageRoot.getChainRoot(alphaChainId), bytes32(0)); + } + + function test_RevertWhen_ChainNotRegistered() public { + address alphaChainSender = makeAddr("alphaChainSender"); + uint256 alphaChainId = uint256(uint160(makeAddr("alphaChainId"))); + vm.mockCall( + bridgeHub, + abi.encodeWithSelector(IBridgehub.getZKChain.selector, alphaChainId), + abi.encode(alphaChainSender) + ); + + vm.prank(alphaChainSender); + 
vm.expectRevert("MR: not registered"); + messageRoot.addChainBatchRoot(alphaChainId, 1, bytes32(alphaChainId)); + } + + function test_addChainBatchRoot() public { + address alphaChainSender = makeAddr("alphaChainSender"); + uint256 alphaChainId = uint256(uint160(makeAddr("alphaChainId"))); + vm.mockCall( + bridgeHub, + abi.encodeWithSelector(IBridgehub.getZKChain.selector, alphaChainId), + abi.encode(alphaChainSender) + ); + + vm.prank(bridgeHub); + messageRoot.addNewChain(alphaChainId); + + vm.prank(alphaChainSender); + vm.expectEmit(true, false, false, false); + emit MessageRoot.Preimage(bytes32(0), bytes32(0)); + vm.expectEmit(true, false, false, false); + emit MessageRoot.AppendedChainBatchRoot(alphaChainId, 1, bytes32(alphaChainId)); + messageRoot.addChainBatchRoot(alphaChainId, 1, bytes32(alphaChainId)); + } + + function test_updateFullTree() public { + address alphaChainSender = makeAddr("alphaChainSender"); + uint256 alphaChainId = uint256(uint160(makeAddr("alphaChainId"))); + vm.mockCall( + bridgeHub, + abi.encodeWithSelector(IBridgehub.getZKChain.selector, alphaChainId), + abi.encode(alphaChainSender) + ); + + vm.prank(bridgeHub); + messageRoot.addNewChain(alphaChainId); + + vm.prank(alphaChainSender); + messageRoot.addChainBatchRoot(alphaChainId, 1, bytes32(alphaChainId)); + + messageRoot.updateFullTree(); + + assertEq(messageRoot.getAggregatedRoot(), 0x0ef1ac67d77f177a33449c47a8f05f0283300a81adca6f063c92c774beed140c); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Bridgehub/_Bridgehub_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/_Bridgehub_Shared.t.sol similarity index 98% rename from l1-contracts/test/foundry/unit/concrete/Bridgehub/_Bridgehub_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/_Bridgehub_Shared.t.sol index 3d0b445a5..54d264daa 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridgehub/_Bridgehub_Shared.t.sol +++ 
b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/_Bridgehub_Shared.t.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8.17; +pragma solidity ^0.8.21; import {Test} from "forge-std/Test.sol"; diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/experimental_bridge.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/experimental_bridge.t.sol new file mode 100644 index 000000000..6ffbbc0e9 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridgehub/experimental_bridge.t.sol @@ -0,0 +1,1810 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {stdStorage, StdStorage, Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {ChainCreationParams} from "contracts/state-transition/IChainTypeManager.sol"; +import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; +import {DummyChainTypeManagerWBH} from "contracts/dev-contracts/test/DummyChainTypeManagerWithBridgeHubAddress.sol"; +import {DummyZKChain} from "contracts/dev-contracts/test/DummyZKChain.sol"; +import {DummySharedBridge} from "contracts/dev-contracts/test/DummySharedBridge.sol"; +import {DummyBridgehubSetter} from "contracts/dev-contracts/test/DummyBridgehubSetter.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {IL1Nullifier} from "contracts/bridge/L1Nullifier.sol"; + +import {L2Message, L2Log, TxStatus, BridgehubL2TransactionRequest} from "contracts/common/Messaging.sol"; 
+import {L2_NATIVE_TOKEN_VAULT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; + +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {L2TransactionRequestTwoBridgesInner} from "contracts/bridgehub/IBridgehub.sol"; +import {ETH_TOKEN_ADDRESS, REQUIRED_L2_GAS_PRICE_PER_PUBDATA, MAX_NEW_FACTORY_DEPS, TWO_BRIDGES_MAGIC_VALUE} from "contracts/common/Config.sol"; +import {L1ERC20Bridge} from "contracts/bridge/L1ERC20Bridge.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {AssetIdNotSupported, ZeroChainId, ChainAlreadyLive, AssetIdAlreadyRegistered, AddressTooLow, ChainIdTooBig, WrongMagicValue, SharedBridgeNotSet, TokenNotRegistered, BridgeHubAlreadyRegistered, MsgValueMismatch, SlotOccupied, CTMAlreadyRegistered, TokenAlreadyRegistered, Unauthorized, NonEmptyMsgValue, CTMNotRegistered, InvalidChainId} from "contracts/common/L1ContractErrors.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; + +contract ExperimentalBridgeTest is Test { + using stdStorage for StdStorage; + + address weth; + Bridgehub bridgeHub; + DummyBridgehubSetter dummyBridgehub; + address public bridgeOwner; + address public testTokenAddress; + DummyChainTypeManagerWBH mockCTM; + DummyZKChain mockChainContract; + DummySharedBridge mockSharedBridge; + DummySharedBridge mockSecondSharedBridge; + L1AssetRouter sharedBridge; + address sharedBridgeAddress; + address secondBridgeAddress; + address l1NullifierAddress; + L1AssetRouter secondBridge; + TestnetERC20Token testToken; + L1NativeTokenVault ntv; + IMessageRoot messageRoot; + L1Nullifier l1Nullifier; + + bytes32 tokenAssetId; + + bytes32 private constant LOCK_FLAG_ADDRESS 
= 0x8e94fed44239eb2314ab7a406345e6c5a8f0ccedf3b600de3d004e672c33abf4; + + bytes32 ETH_TOKEN_ASSET_ID = + keccak256(abi.encode(block.chainid, L2_NATIVE_TOKEN_VAULT_ADDR, bytes32(uint256(uint160(ETH_TOKEN_ADDRESS))))); + + TestnetERC20Token testToken6; + TestnetERC20Token testToken8; + TestnetERC20Token testToken18; + + address mockL2Contract; + + uint256 eraChainId; + + address deployerAddress; + + event NewChain(uint256 indexed chainId, address chainTypeManager, address indexed chainGovernance); + + modifier useRandomToken(uint256 randomValue) { + _setRandomToken(randomValue); + + _; + } + + function _setRandomToken(uint256 randomValue) internal { + uint256 tokenIndex = randomValue % 3; + TestnetERC20Token token; + if (tokenIndex == 0) { + testToken = testToken18; + } else if (tokenIndex == 1) { + testToken = testToken6; + } else { + testToken = testToken8; + } + + tokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, address(testToken)); + } + + function setUp() public { + deployerAddress = makeAddr("DEPLOYER_ADDRESS"); + eraChainId = 320; + uint256 l1ChainId = block.chainid; + bridgeOwner = makeAddr("BRIDGE_OWNER"); + dummyBridgehub = new DummyBridgehubSetter(l1ChainId, bridgeOwner, type(uint256).max); + bridgeHub = Bridgehub(address(dummyBridgehub)); + weth = makeAddr("WETH"); + mockCTM = new DummyChainTypeManagerWBH(address(bridgeHub)); + mockChainContract = new DummyZKChain(address(bridgeHub), eraChainId, block.chainid); + + mockL2Contract = makeAddr("mockL2Contract"); + // mocks to use in bridges instead of using a dummy one + address mockL1WethAddress = makeAddr("Weth"); + address eraDiamondProxy = makeAddr("eraDiamondProxy"); + + l1Nullifier = new L1Nullifier(bridgeHub, eraChainId, eraDiamondProxy); + l1NullifierAddress = address(l1Nullifier); + + mockSharedBridge = new DummySharedBridge(keccak256("0xabc")); + mockSecondSharedBridge = new DummySharedBridge(keccak256("0xdef")); + + ntv = _deployNTV(address(mockSharedBridge)); + + 
mockSecondSharedBridge.setNativeTokenVault(ntv); + + testToken = new TestnetERC20Token("ZKSTT", "ZkSync Test Token", 18); + testTokenAddress = address(testToken); + ntv.registerToken(address(testToken)); + tokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, address(testToken)); + + messageRoot = new MessageRoot(bridgeHub); + + sharedBridge = new L1AssetRouter( + mockL1WethAddress, + address(bridgeHub), + l1NullifierAddress, + eraChainId, + eraDiamondProxy + ); + address defaultOwner = sharedBridge.owner(); + vm.prank(defaultOwner); + sharedBridge.transferOwnership(bridgeOwner); + vm.prank(bridgeOwner); + sharedBridge.acceptOwnership(); + + secondBridge = new L1AssetRouter( + mockL1WethAddress, + address(bridgeHub), + l1NullifierAddress, + eraChainId, + eraDiamondProxy + ); + defaultOwner = secondBridge.owner(); + vm.prank(defaultOwner); + secondBridge.transferOwnership(bridgeOwner); + vm.prank(bridgeOwner); + secondBridge.acceptOwnership(); + + sharedBridgeAddress = address(sharedBridge); + secondBridgeAddress = address(secondBridge); + testToken18 = new TestnetERC20Token("ZKSTT", "ZkSync Test Token", 18); + testToken6 = new TestnetERC20Token("USDC", "USD Coin", 6); + testToken8 = new TestnetERC20Token("WBTC", "Wrapped Bitcoin", 8); + + // test if the ownership of the bridgeHub is set correctly or not + defaultOwner = bridgeHub.owner(); + + // Now, the `reentrancyGuardInitializer` should prevent anyone from calling `initialize` since we have called the constructor of the contract + vm.expectRevert(SlotOccupied.selector); + bridgeHub.initialize(bridgeOwner); + + vm.store(address(mockChainContract), LOCK_FLAG_ADDRESS, bytes32(uint256(1))); + bytes32 bridgehubLocation = bytes32(uint256(36)); + vm.store(address(mockChainContract), bridgehubLocation, bytes32(uint256(uint160(address(bridgeHub))))); + bytes32 baseTokenGasPriceNominatorLocation = bytes32(uint256(40)); + vm.store(address(mockChainContract), baseTokenGasPriceNominatorLocation, bytes32(uint256(1))); + 
bytes32 baseTokenGasPriceDenominatorLocation = bytes32(uint256(41)); + vm.store(address(mockChainContract), baseTokenGasPriceDenominatorLocation, bytes32(uint256(1))); + // The ownership can only be transferred by the current owner to a new owner via the two-step approach + + // Default owner calls transferOwnership + vm.prank(defaultOwner); + bridgeHub.transferOwnership(bridgeOwner); + + // bridgeOwner calls acceptOwnership + vm.prank(bridgeOwner); + bridgeHub.acceptOwnership(); + + // Ownership should have changed + assertEq(bridgeHub.owner(), bridgeOwner); + } + + function _deployNTV(address _sharedBridgeAddr) internal returns (L1NativeTokenVault addr) { + L1NativeTokenVault ntvImpl = new L1NativeTokenVault(weth, _sharedBridgeAddr, eraChainId, l1Nullifier); + TransparentUpgradeableProxy ntvProxy = new TransparentUpgradeableProxy( + address(ntvImpl), + address(bridgeOwner), + abi.encodeCall(ntvImpl.initialize, (bridgeOwner, address(0))) + ); + addr = L1NativeTokenVault(payable(ntvProxy)); + + vm.prank(bridgeOwner); + L1AssetRouter(_sharedBridgeAddr).setNativeTokenVault(addr); + + addr.registerEthToken(); + } + + function _useFullSharedBridge() internal { + ntv = _deployNTV(address(sharedBridge)); + + secondBridgeAddress = address(sharedBridge); + } + + function _useMockSharedBridge() internal { + sharedBridgeAddress = address(mockSharedBridge); + } + + function _initializeBridgehub() internal { + vm.prank(bridgeOwner); + bridgeHub.setPendingAdmin(deployerAddress); + vm.prank(deployerAddress); + bridgeHub.acceptAdmin(); + + vm.startPrank(bridgeOwner); + bridgeHub.addChainTypeManager(address(mockCTM)); + bridgeHub.addTokenAssetId(tokenAssetId); + bridgeHub.setAddresses(sharedBridgeAddress, ICTMDeploymentTracker(address(0)), messageRoot); + vm.stopPrank(); + + vm.prank(l1Nullifier.owner()); + l1Nullifier.setL1NativeTokenVault(ntv); + vm.prank(l1Nullifier.owner()); + l1Nullifier.setL1AssetRouter(sharedBridgeAddress); + } + + function 
test_newPendingAdminReplacesPrevious(address randomDeployer, address otherRandomDeployer) public { + vm.assume(randomDeployer != address(0)); + vm.assume(otherRandomDeployer != address(0)); + assertEq(address(0), bridgeHub.admin()); + vm.assume(randomDeployer != otherRandomDeployer); + + vm.prank(bridgeHub.owner()); + bridgeHub.setPendingAdmin(randomDeployer); + + vm.prank(bridgeHub.owner()); + bridgeHub.setPendingAdmin(otherRandomDeployer); + + vm.prank(otherRandomDeployer); + bridgeHub.acceptAdmin(); + + assertEq(otherRandomDeployer, bridgeHub.admin()); + } + + function test_onlyPendingAdminCanAccept(address randomDeployer, address otherRandomDeployer) public { + vm.assume(randomDeployer != address(0)); + vm.assume(otherRandomDeployer != address(0)); + assertEq(address(0), bridgeHub.admin()); + vm.assume(randomDeployer != otherRandomDeployer); + + vm.prank(bridgeHub.owner()); + bridgeHub.setPendingAdmin(randomDeployer); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, otherRandomDeployer)); + vm.prank(otherRandomDeployer); + bridgeHub.acceptAdmin(); + + assertEq(address(0), bridgeHub.admin()); + } + + function test_onlyOwnerCanSetDeployer(address randomDeployer) public { + vm.assume(randomDeployer != address(0)); + assertEq(address(0), bridgeHub.admin()); + + vm.prank(bridgeHub.owner()); + bridgeHub.setPendingAdmin(randomDeployer); + vm.prank(randomDeployer); + bridgeHub.acceptAdmin(); + + assertEq(randomDeployer, bridgeHub.admin()); + } + + function test_randomCallerCannotSetDeployer(address randomCaller, address randomDeployer) public { + if (randomCaller != bridgeHub.owner() && randomCaller != bridgeHub.admin()) { + vm.prank(randomCaller); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomCaller)); + bridgeHub.setPendingAdmin(randomDeployer); + + // The deployer shouldn't have changed. 
+ assertEq(address(0), bridgeHub.admin()); + } + } + + function test_addChainTypeManager(address randomAddressWithoutTheCorrectInterface) public { + vm.assume(randomAddressWithoutTheCorrectInterface != address(0)); + bool isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(!isCTMRegistered); + + vm.prank(bridgeOwner); + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(isCTMRegistered); + + // An address that has already been registered, cannot be registered again (at least not before calling `removeChainTypeManager`). + vm.prank(bridgeOwner); + vm.expectRevert(CTMAlreadyRegistered.selector); + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(isCTMRegistered); + } + + function test_addChainTypeManager_cannotBeCalledByRandomAddress( + address randomCaller, + address randomAddressWithoutTheCorrectInterface + ) public { + vm.assume(randomAddressWithoutTheCorrectInterface != address(0)); + bool isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(!isCTMRegistered); + + if (randomCaller != bridgeOwner) { + vm.prank(randomCaller); + vm.expectRevert(bytes("Ownable: caller is not the owner")); + + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + } + + vm.prank(bridgeOwner); + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(isCTMRegistered); + + // An address that has already been registered, cannot be registered again (at least not before calling `removeChainTypeManager`). 
+ vm.prank(bridgeOwner); + vm.expectRevert(CTMAlreadyRegistered.selector); + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + + // Definitely not by a random caller + if (randomCaller != bridgeOwner) { + vm.prank(randomCaller); + vm.expectRevert("Ownable: caller is not the owner"); + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + } + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(isCTMRegistered); + } + + function test_removeChainTypeManager(address randomAddressWithoutTheCorrectInterface) public { + vm.assume(randomAddressWithoutTheCorrectInterface != address(0)); + bool isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(!isCTMRegistered); + + // A non-existent CTM cannot be removed + vm.prank(bridgeOwner); + vm.expectRevert(CTMNotRegistered.selector); + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + + // Let's first register our particular chainTypeManager + vm.prank(bridgeOwner); + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(isCTMRegistered); + + // Only an address that has already been registered, can be removed. 
+ vm.prank(bridgeOwner); + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(!isCTMRegistered); + + // An already removed CTM cannot be removed again + vm.prank(bridgeOwner); + vm.expectRevert(CTMNotRegistered.selector); + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + } + + function test_removeChainTypeManager_cannotBeCalledByRandomAddress( + address randomAddressWithoutTheCorrectInterface, + address randomCaller + ) public { + vm.assume(randomAddressWithoutTheCorrectInterface != address(0)); + bool isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(!isCTMRegistered); + + if (randomCaller != bridgeOwner) { + vm.prank(randomCaller); + vm.expectRevert(bytes("Ownable: caller is not the owner")); + + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + } + + // A non-existent CTM cannot be removed + vm.prank(bridgeOwner); + vm.expectRevert(CTMNotRegistered.selector); + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + + // Let's first register our particular chainTypeManager + vm.prank(bridgeOwner); + bridgeHub.addChainTypeManager(randomAddressWithoutTheCorrectInterface); + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(isCTMRegistered); + + // Only an address that has already been registered, can be removed. 
+ vm.prank(bridgeOwner); + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + + isCTMRegistered = bridgeHub.chainTypeManagerIsRegistered(randomAddressWithoutTheCorrectInterface); + assertTrue(!isCTMRegistered); + + // An already removed CTM cannot be removed again + vm.prank(bridgeOwner); + vm.expectRevert(CTMNotRegistered.selector); + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + + // Not possible by a randomcaller as well + if (randomCaller != bridgeOwner) { + vm.prank(randomCaller); + vm.expectRevert(bytes("Ownable: caller is not the owner")); + bridgeHub.removeChainTypeManager(randomAddressWithoutTheCorrectInterface); + } + } + + function test_addAssetId(address randomAddress) public { + vm.startPrank(bridgeOwner); + bridgeHub.setAddresses(address(mockSharedBridge), ICTMDeploymentTracker(address(0)), IMessageRoot(address(0))); + vm.stopPrank(); + + bytes32 assetId = DataEncoding.encodeNTVAssetId(block.chainid, testTokenAddress); + assertTrue(!bridgeHub.assetIdIsRegistered(assetId), "This random address is not registered as a token"); + + vm.prank(bridgeOwner); + bridgeHub.addTokenAssetId(assetId); + + assertTrue( + bridgeHub.assetIdIsRegistered(assetId), + "after call from the bridgeowner, this randomAddress should be a registered token" + ); + + if (randomAddress != address(testTokenAddress)) { + assetId = DataEncoding.encodeNTVAssetId(block.chainid, address(randomAddress)); + vm.assume(!bridgeHub.assetIdIsRegistered(assetId)); + // Testing to see if a random address can also be added or not + vm.prank(bridgeOwner); + bridgeHub.addTokenAssetId(assetId); + assertTrue(bridgeHub.assetIdIsRegistered(assetId)); + } + + // An already registered token cannot be registered again + vm.prank(bridgeOwner); + vm.expectRevert(AssetIdAlreadyRegistered.selector); + bridgeHub.addTokenAssetId(assetId); + } + + function test_addAssetId_cannotBeCalledByRandomAddress( + address randomCaller, + uint256 randomValue + ) public 
useRandomToken(randomValue) { + vm.startPrank(bridgeOwner); + bridgeHub.setAddresses(address(mockSharedBridge), ICTMDeploymentTracker(address(0)), IMessageRoot(address(0))); + vm.stopPrank(); + + bytes32 assetId = DataEncoding.encodeNTVAssetId(block.chainid, testTokenAddress); + + vm.assume(randomCaller != bridgeOwner); + vm.assume(randomCaller != bridgeHub.admin()); + vm.prank(randomCaller); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomCaller)); + bridgeHub.addTokenAssetId(assetId); + + assertTrue(!bridgeHub.assetIdIsRegistered(assetId), "This random address is not registered as a token"); + + vm.prank(bridgeOwner); + bridgeHub.addTokenAssetId(assetId); + + assertTrue( + bridgeHub.assetIdIsRegistered(assetId), + "after call from the bridgeowner, this testTokenAddress should be a registered token" + ); + + // An already registered token cannot be registered again by randomCaller + if (randomCaller != bridgeOwner) { + vm.prank(bridgeOwner); + vm.expectRevert(AssetIdAlreadyRegistered.selector); + bridgeHub.addTokenAssetId(assetId); + } + } + + function test_setAddresses(address randomAssetRouter, address randomCTMDeployer, address randomMessageRoot) public { + assertTrue(bridgeHub.sharedBridge() == address(0), "Shared bridge is already there"); + assertTrue(bridgeHub.l1CtmDeployer() == ICTMDeploymentTracker(address(0)), "L1 CTM deployer is already there"); + assertTrue(bridgeHub.messageRoot() == IMessageRoot(address(0)), "Message root is already there"); + + vm.prank(bridgeOwner); + bridgeHub.setAddresses( + randomAssetRouter, + ICTMDeploymentTracker(randomCTMDeployer), + IMessageRoot(randomMessageRoot) + ); + + assertTrue(bridgeHub.sharedBridge() == randomAssetRouter, "Shared bridge is already there"); + assertTrue( + bridgeHub.l1CtmDeployer() == ICTMDeploymentTracker(randomCTMDeployer), + "L1 CTM deployer is already there" + ); + assertTrue(bridgeHub.messageRoot() == IMessageRoot(randomMessageRoot), "Message root is already there"); + } + + 
function test_setAddresses_cannotBeCalledByRandomAddress( + address randomCaller, + address randomAssetRouter, + address randomCTMDeployer, + address randomMessageRoot + ) public { + vm.assume(randomCaller != bridgeOwner); + + vm.prank(randomCaller); + vm.expectRevert(bytes("Ownable: caller is not the owner")); + bridgeHub.setAddresses( + randomAssetRouter, + ICTMDeploymentTracker(randomCTMDeployer), + IMessageRoot(randomMessageRoot) + ); + + assertTrue(bridgeHub.sharedBridge() == address(0), "Shared bridge is already there"); + assertTrue(bridgeHub.l1CtmDeployer() == ICTMDeploymentTracker(address(0)), "L1 CTM deployer is already there"); + assertTrue(bridgeHub.messageRoot() == IMessageRoot(address(0)), "Message root is already there"); + } + + uint256 newChainId; + address admin; + + function test_pause_createNewChain( + uint256 chainId, + uint256 salt, + uint256 randomValue + ) public useRandomToken(randomValue) { + chainId = bound(chainId, 1, type(uint48).max); + vm.assume(chainId != block.chainid); + + admin = makeAddr("NEW_CHAIN_ADMIN"); + + vm.prank(bridgeOwner); + bridgeHub.pause(); + vm.prank(bridgeOwner); + bridgeHub.setPendingAdmin(deployerAddress); + vm.prank(deployerAddress); + bridgeHub.acceptAdmin(); + + // ntv.registerToken(address(testToken)); + + // bytes32 tokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, address(testToken)); + + // vm.prank(deployerAddress); + // bridgehub.addTokenAssetId(tokenAssetId); + + vm.expectRevert("Pausable: paused"); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + + vm.prank(bridgeOwner); + bridgeHub.unpause(); + + vm.expectRevert(CTMNotRegistered.selector); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + 
_salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + } + + function test_RevertWhen_CTMNotRegisteredOnCreate( + uint256 chainId, + uint256 salt, + uint256 randomValue + ) public useRandomToken(randomValue) { + chainId = bound(chainId, 1, type(uint48).max); + vm.assume(chainId != block.chainid); + + admin = makeAddr("NEW_CHAIN_ADMIN"); + + vm.prank(bridgeOwner); + bridgeHub.setPendingAdmin(deployerAddress); + vm.prank(deployerAddress); + bridgeHub.acceptAdmin(); + + chainId = bound(chainId, 1, type(uint48).max); + vm.expectRevert(CTMNotRegistered.selector); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + } + + function test_RevertWhen_wrongChainIdOnCreate( + uint256 chainId, + uint256 salt, + uint256 randomValue + ) public useRandomToken(randomValue) { + chainId = bound(chainId, 1, type(uint48).max); + vm.assume(chainId != block.chainid); + + admin = makeAddr("NEW_CHAIN_ADMIN"); + + vm.prank(bridgeOwner); + bridgeHub.setPendingAdmin(deployerAddress); + vm.prank(deployerAddress); + bridgeHub.acceptAdmin(); + + chainId = bound(chainId, type(uint48).max + uint256(1), type(uint256).max); + vm.expectRevert(ChainIdTooBig.selector); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + + chainId = 0; + vm.expectRevert(ZeroChainId.selector); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + } + + function test_RevertWhen_assetIdNotRegistered( + uint256 
chainId, + uint256 salt, + uint256 randomValue + ) public useRandomToken(randomValue) { + chainId = bound(chainId, 1, type(uint48).max); + vm.assume(chainId != block.chainid); + + admin = makeAddr("NEW_CHAIN_ADMIN"); + + vm.prank(bridgeOwner); + bridgeHub.setPendingAdmin(deployerAddress); + vm.prank(deployerAddress); + bridgeHub.acceptAdmin(); + + vm.startPrank(bridgeOwner); + bridgeHub.addChainTypeManager(address(mockCTM)); + vm.stopPrank(); + + vm.expectRevert(abi.encodeWithSelector(AssetIdNotSupported.selector, tokenAssetId)); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + } + + function test_RevertWhen_wethBridgeNotSet( + uint256 chainId, + uint256 salt, + uint256 randomValue + ) public useRandomToken(randomValue) { + chainId = bound(chainId, 1, type(uint48).max); + vm.assume(chainId != block.chainid); + admin = makeAddr("NEW_CHAIN_ADMIN"); + + vm.prank(bridgeOwner); + bridgeHub.setPendingAdmin(deployerAddress); + vm.prank(deployerAddress); + bridgeHub.acceptAdmin(); + + vm.startPrank(bridgeOwner); + bridgeHub.addChainTypeManager(address(mockCTM)); + bridgeHub.addTokenAssetId(tokenAssetId); + vm.stopPrank(); + + vm.expectRevert(SharedBridgeNotSet.selector); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + } + + function test_RevertWhen_chainIdAlreadyRegistered( + uint256 chainId, + uint256 salt, + uint256 randomValue + ) public useRandomToken(randomValue) { + admin = makeAddr("NEW_CHAIN_ADMIN"); + + _initializeBridgehub(); + + chainId = bound(chainId, 1, type(uint48).max); + vm.assume(chainId != block.chainid); + 
stdstore.target(address(bridgeHub)).sig("chainTypeManager(uint256)").with_key(chainId).checked_write( + address(mockCTM) + ); + + vm.expectRevert(BridgeHubAlreadyRegistered.selector); + vm.prank(deployerAddress); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: new bytes[](0) + }); + } + + function test_createNewChain( + address randomCaller, + uint256 chainId, + bytes memory mockInitCalldata, + bytes[] memory factoryDeps, + uint256 salt, + uint256 randomValue, + address newChainAddress + ) public useRandomToken(randomValue) { + admin = makeAddr("NEW_CHAIN_ADMIN"); + chainId = bound(chainId, 1, type(uint48).max); + vm.assume(chainId != block.chainid); + vm.assume(randomCaller != deployerAddress && randomCaller != bridgeOwner); + + _initializeBridgehub(); + + vm.prank(randomCaller); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomCaller)); + bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: salt, + _admin: admin, + _initData: bytes(""), + _factoryDeps: factoryDeps + }); + + vm.prank(mockCTM.owner()); + + // bridgeHub.createNewChain => chainTypeManager.createNewChain => this function sets the stateTransition mapping + // of `chainId`, let's emulate that using foundry cheatcodes or let's just use the extra function we introduced in our mockCTM + mockCTM.setZKChain(chainId, address(mockChainContract)); + + vm.startPrank(deployerAddress); + vm.mockCall( + address(mockCTM), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + mockCTM.createNewChain.selector, + chainId, + tokenAssetId, + admin, + mockInitCalldata, + factoryDeps + ), + abi.encode(newChainAddress) + ); + + vm.expectEmit(true, true, true, true, address(bridgeHub)); + emit NewChain(chainId, address(mockCTM), admin); + + 
bridgeHub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(mockCTM), + _baseTokenAssetId: tokenAssetId, + _salt: uint256(chainId * 2), + _admin: admin, + _initData: mockInitCalldata, + _factoryDeps: factoryDeps + }); + + vm.stopPrank(); + vm.clearMockedCalls(); + + assertTrue(bridgeHub.chainTypeManager(chainId) == address(mockCTM)); + assertTrue(bridgeHub.baseTokenAssetId(chainId) == tokenAssetId); + assertTrue(bridgeHub.getZKChain(chainId) == newChainAddress); + } + + function test_proveL2MessageInclusion( + uint256 mockChainId, + uint256 mockBatchNumber, + uint256 mockIndex, + bytes32[] memory mockProof, + uint16 randomTxNumInBatch, + address randomSender, + bytes memory randomData + ) public { + mockChainId = _setUpZKChainForChainId(mockChainId); + + // Now the following statements should be true as well: + assertTrue(bridgeHub.chainTypeManager(mockChainId) == address(mockCTM)); + assertTrue(bridgeHub.getZKChain(mockChainId) == address(mockChainContract)); + + // Creating a random L2Message::l2Message so that we pass the correct parameters to `proveL2MessageInclusion` + L2Message memory l2Message = _createMockL2Message(randomTxNumInBatch, randomSender, randomData); + + // Since we have used random data for the `bridgeHub.proveL2MessageInclusion` function which basically forwards the call + // to the same function in the mailbox, we will mock the call to the mailbox to return true and see if it works. 
+ vm.mockCall( + address(mockChainContract), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + mockChainContract.proveL2MessageInclusion.selector, + mockBatchNumber, + mockIndex, + l2Message, + mockProof + ), + abi.encode(true) + ); + + assertTrue( + bridgeHub.proveL2MessageInclusion({ + _chainId: mockChainId, + _batchNumber: mockBatchNumber, + _index: mockIndex, + _message: l2Message, + _proof: mockProof + }) + ); + vm.clearMockedCalls(); + } + + function test_proveL2LogInclusion( + uint256 mockChainId, + uint256 mockBatchNumber, + uint256 mockIndex, + bytes32[] memory mockProof, + uint8 randomL2ShardId, + bool randomIsService, + uint16 randomTxNumInBatch, + address randomSender, + bytes32 randomKey, + bytes32 randomValue + ) public { + mockChainId = _setUpZKChainForChainId(mockChainId); + + // Now the following statements should be true as well: + assertTrue(bridgeHub.chainTypeManager(mockChainId) == address(mockCTM)); + assertTrue(bridgeHub.getZKChain(mockChainId) == address(mockChainContract)); + + // Creating a random L2Log::l2Log so that we pass the correct parameters to `proveL2LogInclusion` + L2Log memory l2Log = _createMockL2Log({ + randomL2ShardId: randomL2ShardId, + randomIsService: randomIsService, + randomTxNumInBatch: randomTxNumInBatch, + randomSender: randomSender, + randomKey: randomKey, + randomValue: randomValue + }); + + // Since we have used random data for the `bridgeHub.proveL2LogInclusion` function which basically forwards the call + // to the same function in the mailbox, we will mock the call to the mailbox to return true and see if it works. 
+ vm.mockCall( + address(mockChainContract), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + mockChainContract.proveL2LogInclusion.selector, + mockBatchNumber, + mockIndex, + l2Log, + mockProof + ), + abi.encode(true) + ); + + assertTrue( + bridgeHub.proveL2LogInclusion({ + _chainId: mockChainId, + _batchNumber: mockBatchNumber, + _index: mockIndex, + _log: l2Log, + _proof: mockProof + }) + ); + vm.clearMockedCalls(); + } + + function test_proveL1ToL2TransactionStatus( + uint256 randomChainId, + bytes32 randomL2TxHash, + uint256 randomL2BatchNumber, + uint256 randomL2MessageIndex, + uint16 randomL2TxNumberInBatch, + bytes32[] memory randomMerkleProof, + bool randomResultantBool, + bool txStatusBool + ) public { + randomChainId = _setUpZKChainForChainId(randomChainId); + + TxStatus txStatus; + + if (txStatusBool) { + txStatus = TxStatus.Failure; + } else { + txStatus = TxStatus.Success; + } + + vm.mockCall( + address(mockChainContract), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + mockChainContract.proveL1ToL2TransactionStatus.selector, + randomL2TxHash, + randomL2BatchNumber, + randomL2MessageIndex, + randomL2TxNumberInBatch, + randomMerkleProof, + txStatus + ), + abi.encode(randomResultantBool) + ); + + assertTrue( + bridgeHub.proveL1ToL2TransactionStatus({ + _chainId: randomChainId, + _l2TxHash: randomL2TxHash, + _l2BatchNumber: randomL2BatchNumber, + _l2MessageIndex: randomL2MessageIndex, + _l2TxNumberInBatch: randomL2TxNumberInBatch, + _merkleProof: randomMerkleProof, + _status: txStatus + }) == randomResultantBool + ); + } + + function test_l2TransactionBaseCost( + uint256 mockChainId, + uint256 mockGasPrice, + uint256 mockL2GasLimit, + uint256 mockL2GasPerPubdataByteLimit, + uint256 mockL2TxnCost + ) public { + mockChainId = _setUpZKChainForChainId(mockChainId); + + vm.mockCall( + address(mockChainContract), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + 
mockChainContract.l2TransactionBaseCost.selector, + mockGasPrice, + mockL2GasLimit, + mockL2GasPerPubdataByteLimit + ), + abi.encode(mockL2TxnCost) + ); + + assertTrue( + bridgeHub.l2TransactionBaseCost(mockChainId, mockGasPrice, mockL2GasLimit, mockL2GasPerPubdataByteLimit) == + mockL2TxnCost + ); + vm.clearMockedCalls(); + } + + function _prepareETHL2TransactionDirectRequest( + uint256 mockChainId, + uint256 mockMintValue, + address mockL2Contract, + uint256 mockL2Value, + bytes memory mockL2Calldata, + uint256 mockL2GasLimit, + uint256 mockL2GasPerPubdataByteLimit, + bytes[] memory mockFactoryDeps, + address randomCaller + ) internal returns (L2TransactionRequestDirect memory l2TxnReqDirect, bytes32 canonicalHash) { + vm.assume(mockFactoryDeps.length <= MAX_NEW_FACTORY_DEPS); + + l2TxnReqDirect = _createMockL2TransactionRequestDirect({ + mockChainId: mockChainId, + mockMintValue: mockMintValue, + mockL2Contract: mockL2Contract, + mockL2Value: mockL2Value, + mockL2Calldata: mockL2Calldata, + mockL2GasLimit: mockL2GasLimit, + mockL2GasPerPubdataByteLimit: mockL2GasPerPubdataByteLimit, + mockFactoryDeps: mockFactoryDeps, + mockRefundRecipient: address(0) + }); + + l2TxnReqDirect.chainId = _setUpZKChainForChainId(l2TxnReqDirect.chainId); + + assertTrue(bridgeHub.baseTokenAssetId(l2TxnReqDirect.chainId) != ETH_TOKEN_ASSET_ID); + _setUpBaseTokenForChainId(l2TxnReqDirect.chainId, true, address(0)); + + assertTrue(bridgeHub.baseTokenAssetId(l2TxnReqDirect.chainId) == ETH_TOKEN_ASSET_ID); + console.log(IL1AssetRouter(bridgeHub.sharedBridge()).assetHandlerAddress(ETH_TOKEN_ASSET_ID)); + assertTrue(bridgeHub.baseToken(l2TxnReqDirect.chainId) == ETH_TOKEN_ADDRESS); + + assertTrue(bridgeHub.getZKChain(l2TxnReqDirect.chainId) == address(mockChainContract)); + canonicalHash = keccak256(abi.encode("CANONICAL_TX_HASH")); + + vm.mockCall( + address(mockChainContract), + abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), + abi.encode(canonicalHash) + 
); + + mockChainContract.setFeeParams(); + mockChainContract.setBaseTokenGasMultiplierPrice(uint128(1), uint128(1)); + mockChainContract.setBridgeHubAddress(address(bridgeHub)); + assertTrue(mockChainContract.getBridgeHubAddress() == address(bridgeHub)); + } + + function test_requestL2TransactionDirect_RevertWhen_incorrectETHParams( + uint256 mockChainId, + uint256 mockMintValue, + address mockL2Contract, + uint256 mockL2Value, + uint256 msgValue, + bytes memory mockL2Calldata, + uint256 mockL2GasLimit, + uint256 mockL2GasPerPubdataByteLimit, + bytes[] memory mockFactoryDeps + ) public { + _useMockSharedBridge(); + _initializeBridgehub(); + + address randomCaller = makeAddr("RANDOM_CALLER"); + vm.assume(msgValue != mockMintValue); + + (L2TransactionRequestDirect memory l2TxnReqDirect, bytes32 hash) = _prepareETHL2TransactionDirectRequest({ + mockChainId: mockChainId, + mockMintValue: mockMintValue, + mockL2Contract: mockL2Contract, + mockL2Value: mockL2Value, + mockL2Calldata: mockL2Calldata, + mockL2GasLimit: mockL2GasLimit, + mockL2GasPerPubdataByteLimit: mockL2GasPerPubdataByteLimit, + mockFactoryDeps: mockFactoryDeps, + randomCaller: randomCaller + }); + + vm.deal(randomCaller, msgValue); + vm.expectRevert(abi.encodeWithSelector(MsgValueMismatch.selector, mockMintValue, msgValue)); + vm.prank(randomCaller); + bridgeHub.requestL2TransactionDirect{value: msgValue}(l2TxnReqDirect); + } + + function test_requestL2TransactionDirect_ETHCase( + uint256 mockChainId, + uint256 mockMintValue, + address mockL2Contract, + uint256 mockL2Value, + bytes memory mockL2Calldata, + uint256 mockL2GasLimit, + uint256 mockL2GasPerPubdataByteLimit, + bytes[] memory mockFactoryDeps, + uint256 gasPrice + ) public { + _useMockSharedBridge(); + _initializeBridgehub(); + + address randomCaller = makeAddr("RANDOM_CALLER"); + mockChainId = bound(mockChainId, 1, type(uint48).max); + + (L2TransactionRequestDirect memory l2TxnReqDirect, bytes32 hash) = _prepareETHL2TransactionDirectRequest({ + 
mockChainId: mockChainId, + mockMintValue: mockMintValue, + mockL2Contract: mockL2Contract, + mockL2Value: mockL2Value, + mockL2Calldata: mockL2Calldata, + mockL2GasLimit: mockL2GasLimit, + mockL2GasPerPubdataByteLimit: mockL2GasPerPubdataByteLimit, + mockFactoryDeps: mockFactoryDeps, + randomCaller: randomCaller + }); + + vm.deal(randomCaller, l2TxnReqDirect.mintValue); + gasPrice = bound(gasPrice, 1_000, 50_000_000); + vm.txGasPrice(gasPrice * 1 gwei); + vm.prank(randomCaller); + bytes32 resultantHash = bridgeHub.requestL2TransactionDirect{value: randomCaller.balance}(l2TxnReqDirect); + + assertTrue(resultantHash == hash); + } + + function test_requestL2TransactionDirect_NonETHCase( + uint256 mockChainId, + uint256 mockMintValue, + address mockL2Contract, + uint256 mockL2Value, + bytes memory mockL2Calldata, + uint256 mockL2GasLimit, + uint256 mockL2GasPerPubdataByteLimit, + bytes[] memory mockFactoryDeps, + uint256 gasPrice, + uint256 randomValue + ) public useRandomToken(randomValue) { + _useFullSharedBridge(); + _initializeBridgehub(); + + address randomCaller = makeAddr("RANDOM_CALLER"); + mockChainId = bound(mockChainId, 1, type(uint48).max); + + vm.assume(mockFactoryDeps.length <= MAX_NEW_FACTORY_DEPS); + vm.assume(mockMintValue > 0); + + L2TransactionRequestDirect memory l2TxnReqDirect = _createMockL2TransactionRequestDirect({ + mockChainId: mockChainId, + mockMintValue: mockMintValue, + mockL2Contract: mockL2Contract, + mockL2Value: mockL2Value, + mockL2Calldata: mockL2Calldata, + mockL2GasLimit: mockL2GasLimit, + mockL2GasPerPubdataByteLimit: mockL2GasPerPubdataByteLimit, + mockFactoryDeps: mockFactoryDeps, + mockRefundRecipient: address(0) + }); + + l2TxnReqDirect.chainId = _setUpZKChainForChainId(l2TxnReqDirect.chainId); + + _setUpBaseTokenForChainId(l2TxnReqDirect.chainId, false, address(testToken)); + + assertTrue(bridgeHub.getZKChain(l2TxnReqDirect.chainId) == address(mockChainContract)); + bytes32 canonicalHash = 
keccak256(abi.encode("CANONICAL_TX_HASH")); + + vm.mockCall( + address(mockChainContract), + abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), + abi.encode(canonicalHash) + ); + + mockChainContract.setFeeParams(); + mockChainContract.setBaseTokenGasMultiplierPrice(uint128(1), uint128(1)); + mockChainContract.setBridgeHubAddress(address(bridgeHub)); + assertTrue(mockChainContract.getBridgeHubAddress() == address(bridgeHub)); + + gasPrice = bound(gasPrice, 1_000, 50_000_000); + vm.txGasPrice(gasPrice * 1 gwei); + + vm.deal(randomCaller, 1 ether); + vm.prank(randomCaller); + vm.expectRevert(abi.encodeWithSelector(MsgValueMismatch.selector, 0, randomCaller.balance)); + bytes32 resultantHash = bridgeHub.requestL2TransactionDirect{value: randomCaller.balance}(l2TxnReqDirect); + + // Now, let's call the same function with zero msg.value + testToken.mint(randomCaller, l2TxnReqDirect.mintValue); + assertEq(testToken.balanceOf(randomCaller), l2TxnReqDirect.mintValue); + + vm.prank(randomCaller); + testToken.transfer(address(this), l2TxnReqDirect.mintValue); + assertEq(testToken.balanceOf(address(this)), l2TxnReqDirect.mintValue); + testToken.approve(sharedBridgeAddress, l2TxnReqDirect.mintValue); + + resultantHash = bridgeHub.requestL2TransactionDirect(l2TxnReqDirect); + + assertEq(canonicalHash, resultantHash); + } + + function test_requestTransactionTwoBridgesChecksMagicValue( + uint256 chainId, + uint256 mintValue, + uint256 l2Value, + uint256 l2GasLimit, + uint256 l2GasPerPubdataByteLimit, + address refundRecipient, + uint256 secondBridgeValue, + bytes memory secondBridgeCalldata, + bytes32 magicValue + ) public { + _useMockSharedBridge(); + _initializeBridgehub(); + + vm.assume(magicValue != TWO_BRIDGES_MAGIC_VALUE); + + chainId = bound(chainId, 1, type(uint48).max); + + L2TransactionRequestTwoBridgesOuter memory l2TxnReq2BridgeOut = _createMockL2TransactionRequestTwoBridgesOuter({ + chainId: chainId, + mintValue: mintValue, + l2Value: 
l2Value, + l2GasLimit: l2GasLimit, + l2GasPerPubdataByteLimit: l2GasPerPubdataByteLimit, + refundRecipient: refundRecipient, + secondBridgeValue: secondBridgeValue, + secondBridgeCalldata: secondBridgeCalldata + }); + + l2TxnReq2BridgeOut.chainId = _setUpZKChainForChainId(l2TxnReq2BridgeOut.chainId); + + _setUpBaseTokenForChainId(l2TxnReq2BridgeOut.chainId, true, address(0)); + assertTrue(bridgeHub.baseToken(l2TxnReq2BridgeOut.chainId) == ETH_TOKEN_ADDRESS); + + assertTrue(bridgeHub.getZKChain(l2TxnReq2BridgeOut.chainId) == address(mockChainContract)); + + uint256 callerMsgValue = l2TxnReq2BridgeOut.mintValue + l2TxnReq2BridgeOut.secondBridgeValue; + address randomCaller = makeAddr("RANDOM_CALLER"); + vm.deal(randomCaller, callerMsgValue); + + L2TransactionRequestTwoBridgesInner memory request = L2TransactionRequestTwoBridgesInner({ + magicValue: magicValue, + l2Contract: makeAddr("L2_CONTRACT"), + l2Calldata: new bytes(0), + factoryDeps: new bytes[](0), + txDataHash: bytes32(0) + }); + + vm.mockCall( + secondBridgeAddress, + abi.encodeWithSelector(IL1AssetRouter.bridgehubDeposit.selector), + abi.encode(request) + ); + + vm.expectRevert(abi.encodeWithSelector(WrongMagicValue.selector, TWO_BRIDGES_MAGIC_VALUE, magicValue)); + vm.prank(randomCaller); + bridgeHub.requestL2TransactionTwoBridges{value: randomCaller.balance}(l2TxnReq2BridgeOut); + } + + function test_requestL2TransactionTwoBridgesWrongBridgeAddress( + uint256 chainId, + uint256 mintValue, + uint256 msgValue, + uint256 l2Value, + uint256 l2GasLimit, + uint256 l2GasPerPubdataByteLimit, + address refundRecipient, + uint256 secondBridgeValue, + uint160 secondBridgeAddressValue, + bytes memory secondBridgeCalldata + ) public { + _useMockSharedBridge(); + _initializeBridgehub(); + + chainId = bound(chainId, 1, type(uint48).max); + + L2TransactionRequestTwoBridgesOuter memory l2TxnReq2BridgeOut = _createMockL2TransactionRequestTwoBridgesOuter({ + chainId: chainId, + mintValue: mintValue, + l2Value: l2Value, + 
l2GasLimit: l2GasLimit, + l2GasPerPubdataByteLimit: l2GasPerPubdataByteLimit, + refundRecipient: refundRecipient, + secondBridgeValue: secondBridgeValue, + secondBridgeCalldata: secondBridgeCalldata + }); + + l2TxnReq2BridgeOut.chainId = _setUpZKChainForChainId(l2TxnReq2BridgeOut.chainId); + + _setUpBaseTokenForChainId(l2TxnReq2BridgeOut.chainId, true, address(0)); + assertTrue(bridgeHub.baseToken(l2TxnReq2BridgeOut.chainId) == ETH_TOKEN_ADDRESS); + + assertTrue(bridgeHub.getZKChain(l2TxnReq2BridgeOut.chainId) == address(mockChainContract)); + + uint256 callerMsgValue = l2TxnReq2BridgeOut.mintValue + l2TxnReq2BridgeOut.secondBridgeValue; + address randomCaller = makeAddr("RANDOM_CALLER"); + vm.deal(randomCaller, callerMsgValue); + + mockChainContract.setBridgeHubAddress(address(bridgeHub)); + + bytes32 canonicalHash = keccak256(abi.encode("CANONICAL_TX_HASH")); + + vm.mockCall( + address(mockChainContract), + abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), + abi.encode(canonicalHash) + ); + + L2TransactionRequestTwoBridgesInner memory outputRequest = L2TransactionRequestTwoBridgesInner({ + magicValue: TWO_BRIDGES_MAGIC_VALUE, + l2Contract: address(0), + l2Calldata: abi.encode(""), + factoryDeps: new bytes[](0), + txDataHash: bytes32("") + }); + secondBridgeAddressValue = uint160(bound(uint256(secondBridgeAddressValue), 0, uint256(type(uint16).max))); + address secondBridgeAddress = address(secondBridgeAddressValue); + + vm.mockCall( + address(secondBridgeAddressValue), + l2TxnReq2BridgeOut.secondBridgeValue, + abi.encodeWithSelector( + IL1AssetRouter.bridgehubDeposit.selector, + l2TxnReq2BridgeOut.chainId, + randomCaller, + l2TxnReq2BridgeOut.l2Value, + l2TxnReq2BridgeOut.secondBridgeCalldata + ), + abi.encode(outputRequest) + ); + + l2TxnReq2BridgeOut.secondBridgeAddress = address(secondBridgeAddressValue); + vm.expectRevert(abi.encodeWithSelector(AddressTooLow.selector, secondBridgeAddress)); + vm.prank(randomCaller); + 
bridgeHub.requestL2TransactionTwoBridges{value: randomCaller.balance}(l2TxnReq2BridgeOut); + } + + function test_requestL2TransactionTwoBridges_ERC20ToNonBase( + uint256 chainId, + uint256 mintValue, + uint256 l2Value, + uint256 l2GasLimit, + uint256 l2GasPerPubdataByteLimit, + address l2Receiver, + uint256 randomValue + ) public useRandomToken(randomValue) { + _useFullSharedBridge(); + _initializeBridgehub(); + vm.assume(mintValue > 0); + + // create another token, to avoid base token + TestnetERC20Token erc20Token = new TestnetERC20Token("ZKESTT", "ZkSync ERC Test Token", 18); + address erc20TokenAddress = address(erc20Token); + l2Value = bound(l2Value, 1, type(uint256).max); + bytes memory secondBridgeCalldata = abi.encode(erc20TokenAddress, l2Value, l2Receiver); + + chainId = _setUpZKChainForChainId(chainId); + + L2TransactionRequestTwoBridgesOuter memory l2TxnReq2BridgeOut = _createMockL2TransactionRequestTwoBridgesOuter({ + chainId: chainId, + mintValue: mintValue, + l2Value: 0, // not used + l2GasLimit: l2GasLimit, + l2GasPerPubdataByteLimit: l2GasPerPubdataByteLimit, + refundRecipient: address(0), + secondBridgeValue: 0, // not used cause we are using ERC20 + secondBridgeCalldata: secondBridgeCalldata + }); + + address randomCaller = makeAddr("RANDOM_CALLER"); + bytes32 canonicalHash = keccak256(abi.encode("CANONICAL_TX_HASH")); + + _setUpBaseTokenForChainId(l2TxnReq2BridgeOut.chainId, false, address(testToken)); + assertTrue(bridgeHub.baseToken(l2TxnReq2BridgeOut.chainId) == address(testToken)); + assertTrue(bridgeHub.getZKChain(l2TxnReq2BridgeOut.chainId) == address(mockChainContract)); + mockChainContract.setBridgeHubAddress(address(bridgeHub)); + + vm.mockCall( + address(mockChainContract), + abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), + abi.encode(canonicalHash) + ); + + testToken.mint(randomCaller, l2TxnReq2BridgeOut.mintValue); + erc20Token.mint(randomCaller, l2Value); + + 
assertEq(testToken.balanceOf(randomCaller), l2TxnReq2BridgeOut.mintValue); + assertEq(erc20Token.balanceOf(randomCaller), l2Value); + + vm.startPrank(randomCaller); + testToken.approve(sharedBridgeAddress, l2TxnReq2BridgeOut.mintValue); + erc20Token.approve(secondBridgeAddress, l2Value); + vm.stopPrank(); + vm.prank(randomCaller); + bytes32 resultHash = bridgeHub.requestL2TransactionTwoBridges(l2TxnReq2BridgeOut); + assertEq(resultHash, canonicalHash); + + assertEq(erc20Token.balanceOf(randomCaller), 0); + assertEq(testToken.balanceOf(randomCaller), 0); + assertEq(erc20Token.balanceOf(address(ntv)), l2Value); + assertEq(testToken.balanceOf(address(ntv)), l2TxnReq2BridgeOut.mintValue); + + l2TxnReq2BridgeOut.secondBridgeValue = 1; + testToken.mint(randomCaller, l2TxnReq2BridgeOut.mintValue); + vm.startPrank(randomCaller); + testToken.approve(sharedBridgeAddress, l2TxnReq2BridgeOut.mintValue); + vm.expectRevert(abi.encodeWithSelector(MsgValueMismatch.selector, l2TxnReq2BridgeOut.secondBridgeValue, 0)); + bridgeHub.requestL2TransactionTwoBridges(l2TxnReq2BridgeOut); + vm.stopPrank(); + } + + function test_requestL2TransactionTwoBridges_ETHToNonBase( + uint256 chainId, + uint256 mintValue, + uint256 msgValue, + uint256 l2Value, + uint256 l2GasLimit, + uint256 l2GasPerPubdataByteLimit, + address refundRecipient, + uint256 secondBridgeValue, + address l2Receiver, + uint256 randomValue + ) public useRandomToken(randomValue) { + _useFullSharedBridge(); + _initializeBridgehub(); + vm.assume(mintValue > 0); + + secondBridgeValue = bound(secondBridgeValue, 1, type(uint256).max); + bytes memory secondBridgeCalldata = abi.encode(ETH_TOKEN_ADDRESS, 0, l2Receiver); + + chainId = _setUpZKChainForChainId(chainId); + + L2TransactionRequestTwoBridgesOuter memory l2TxnReq2BridgeOut = _createMockL2TransactionRequestTwoBridgesOuter({ + chainId: chainId, + mintValue: mintValue, + l2Value: l2Value, + l2GasLimit: l2GasLimit, + l2GasPerPubdataByteLimit: l2GasPerPubdataByteLimit, + 
refundRecipient: refundRecipient, + secondBridgeValue: secondBridgeValue, + secondBridgeCalldata: secondBridgeCalldata + }); + + _setUpBaseTokenForChainId(l2TxnReq2BridgeOut.chainId, false, address(testToken)); + assertTrue(bridgeHub.baseToken(l2TxnReq2BridgeOut.chainId) == address(testToken)); + assertTrue(bridgeHub.getZKChain(l2TxnReq2BridgeOut.chainId) == address(mockChainContract)); + + address randomCaller = makeAddr("RANDOM_CALLER"); + + mockChainContract.setBridgeHubAddress(address(bridgeHub)); + + { + bytes32 canonicalHash = keccak256(abi.encode("CANONICAL_TX_HASH")); + + vm.mockCall( + address(mockChainContract), + abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), + abi.encode(canonicalHash) + ); + } + + if (msgValue != secondBridgeValue) { + vm.deal(randomCaller, msgValue); + vm.expectRevert( + abi.encodeWithSelector(MsgValueMismatch.selector, l2TxnReq2BridgeOut.secondBridgeValue, msgValue) + ); + vm.prank(randomCaller); + bridgeHub.requestL2TransactionTwoBridges{value: msgValue}(l2TxnReq2BridgeOut); + } + + testToken.mint(randomCaller, l2TxnReq2BridgeOut.mintValue); + assertEq(testToken.balanceOf(randomCaller), l2TxnReq2BridgeOut.mintValue); + vm.prank(randomCaller); + testToken.approve(sharedBridgeAddress, l2TxnReq2BridgeOut.mintValue); + + vm.deal(randomCaller, l2TxnReq2BridgeOut.secondBridgeValue); + vm.prank(randomCaller); + bridgeHub.requestL2TransactionTwoBridges{value: randomCaller.balance}(l2TxnReq2BridgeOut); + } + + ///////////////////////////////////////////////////////// + // INTERNAL UTILITY FUNCTIONS + ///////////////////////////////////////////////////////// + + function _createMockL2TransactionRequestTwoBridgesOuter( + uint256 chainId, + uint256 mintValue, + uint256 l2Value, + uint256 l2GasLimit, + uint256 l2GasPerPubdataByteLimit, + address refundRecipient, + uint256 secondBridgeValue, + bytes memory secondBridgeCalldata + ) internal view returns (L2TransactionRequestTwoBridgesOuter memory) { + 
L2TransactionRequestTwoBridgesOuter memory l2Req; + + // Don't let the mintValue + secondBridgeValue go beyond type(uint256).max since that calculation is required to be done by our test: test_requestL2TransactionTwoBridges_ETHCase + + mintValue = bound(mintValue, 0, (type(uint256).max) / 2); + secondBridgeValue = bound(secondBridgeValue, 0, (type(uint256).max) / 2); + + l2Req.chainId = chainId; + l2Req.mintValue = mintValue; + l2Req.l2Value = l2Value; + l2Req.l2GasLimit = l2GasLimit; + l2Req.l2GasPerPubdataByteLimit = l2GasPerPubdataByteLimit; + l2Req.refundRecipient = refundRecipient; + l2Req.secondBridgeAddress = secondBridgeAddress; + l2Req.secondBridgeValue = secondBridgeValue; + l2Req.secondBridgeCalldata = secondBridgeCalldata; + + return l2Req; + } + + function _createMockL2Message( + uint16 randomTxNumInBatch, + address randomSender, + bytes memory randomData + ) internal pure returns (L2Message memory) { + L2Message memory l2Message; + + l2Message.txNumberInBatch = randomTxNumInBatch; + l2Message.sender = randomSender; + l2Message.data = randomData; + + return l2Message; + } + + function _createMockL2Log( + uint8 randomL2ShardId, + bool randomIsService, + uint16 randomTxNumInBatch, + address randomSender, + bytes32 randomKey, + bytes32 randomValue + ) internal pure returns (L2Log memory) { + L2Log memory l2Log; + + l2Log.l2ShardId = randomL2ShardId; + l2Log.isService = randomIsService; + l2Log.txNumberInBatch = randomTxNumInBatch; + l2Log.sender = randomSender; + l2Log.key = randomKey; + l2Log.value = randomValue; + + return l2Log; + } + + function _createNewChainInitData( + bool isFreezable, + bytes4[] memory mockSelectors, + address, //mockInitAddress, + bytes memory //mockInitCalldata + ) internal returns (bytes memory) { + bytes4[] memory singleSelector = new bytes4[](1); + singleSelector[0] = bytes4(0xabcdef12); + + Diamond.FacetCut memory facetCut; + Diamond.DiamondCutData memory diamondCutData; + + facetCut.facet = address(this); // for a random 
address, it will fail the check of _facet.code.length > 0 + facetCut.action = Diamond.Action.Add; + facetCut.isFreezable = isFreezable; + if (mockSelectors.length == 0) { + mockSelectors = singleSelector; + } + facetCut.selectors = mockSelectors; + + Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](1); + facetCuts[0] = facetCut; + + diamondCutData.facetCuts = facetCuts; + diamondCutData.initAddress = address(0); + diamondCutData.initCalldata = ""; + + ChainCreationParams memory params = ChainCreationParams({ + diamondCut: diamondCutData, + // Just some dummy values: + genesisUpgrade: address(0x01), + genesisBatchHash: bytes32(uint256(0x01)), + genesisIndexRepeatedStorageChanges: uint64(0x01), + genesisBatchCommitment: bytes32(uint256(0x01)), + forceDeploymentsData: bytes("") + }); + + mockCTM.setChainCreationParams(params); + + return abi.encode(abi.encode(diamondCutData), bytes("")); + } + + function _setUpZKChainForChainId(uint256 mockChainId) internal returns (uint256 mockChainIdInRange) { + mockChainId = bound(mockChainId, 1, type(uint48).max); + mockChainIdInRange = mockChainId; + + if (!bridgeHub.chainTypeManagerIsRegistered(address(mockCTM))) { + vm.prank(bridgeOwner); + bridgeHub.addChainTypeManager(address(mockCTM)); + } + + // We need to set the chainTypeManager of the mockChainId to mockCTM + // There is no function to do that in the bridgeHub + // So, perhaps we will have to manually set the values in the chainTypeManager mapping via a foundry cheatcode + assertTrue(!(bridgeHub.chainTypeManager(mockChainId) == address(mockCTM))); + + dummyBridgehub.setCTM(mockChainId, address(mockCTM)); + dummyBridgehub.setZKChain(mockChainId, address(mockChainContract)); + } + + function _setUpBaseTokenForChainId(uint256 mockChainId, bool tokenIsETH, address token) internal { + if (tokenIsETH) { + token = ETH_TOKEN_ADDRESS; + } else { + ntv.registerToken(token); + } + + bytes32 baseTokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, token); + + 
stdstore.target(address(bridgeHub)).sig("baseTokenAssetId(uint256)").with_key(mockChainId).checked_write( + baseTokenAssetId + ); + } + + function _createMockL2TransactionRequestDirect( + uint256 mockChainId, + uint256 mockMintValue, + address mockL2Contract, + uint256 mockL2Value, + bytes memory mockL2Calldata, + uint256 mockL2GasLimit, + // solhint-disable-next-line no-unused-vars + uint256 mockL2GasPerPubdataByteLimit, + bytes[] memory mockFactoryDeps, + address mockRefundRecipient + ) internal pure returns (L2TransactionRequestDirect memory) { + L2TransactionRequestDirect memory l2TxnReqDirect; + + l2TxnReqDirect.chainId = mockChainId; + l2TxnReqDirect.mintValue = mockMintValue; + l2TxnReqDirect.l2Contract = mockL2Contract; + l2TxnReqDirect.l2Value = mockL2Value; + l2TxnReqDirect.l2Calldata = mockL2Calldata; + l2TxnReqDirect.l2GasLimit = mockL2GasLimit; + l2TxnReqDirect.l2GasPerPubdataByteLimit = REQUIRED_L2_GAS_PRICE_PER_PUBDATA; + l2TxnReqDirect.factoryDeps = mockFactoryDeps; + l2TxnReqDirect.refundRecipient = mockRefundRecipient; + + return l2TxnReqDirect; + } + + function _createBhL2TxnRequest( + bytes[] memory mockFactoryDepsBH + ) internal returns (BridgehubL2TransactionRequest memory) { + BridgehubL2TransactionRequest memory bhL2TxnRequest; + + bhL2TxnRequest.sender = makeAddr("BH_L2_REQUEST_SENDER"); + bhL2TxnRequest.contractL2 = makeAddr("BH_L2_REQUEST_CONTRACT"); + bhL2TxnRequest.mintValue = block.timestamp; + bhL2TxnRequest.l2Value = block.timestamp * 2; + bhL2TxnRequest.l2Calldata = abi.encode("mock L2 Calldata"); + bhL2TxnRequest.l2GasLimit = block.timestamp * 3; + bhL2TxnRequest.l2GasPerPubdataByteLimit = block.timestamp * 4; + bhL2TxnRequest.factoryDeps = mockFactoryDepsBH; + bhL2TxnRequest.refundRecipient = makeAddr("BH_L2_REQUEST_REFUND_RECIPIENT"); + + return bhL2TxnRequest; + } + + function _restrictArraySize(bytes[] memory longArray, uint256 newSize) internal pure returns (bytes[] memory) { + bytes[] memory shortArray = new bytes[](newSize); 
+ + for (uint256 i; i < newSize; i++) { + shortArray[i] = longArray[i]; + } + + return shortArray; + } + + ///////////////////////////////////////////////////////// + // OLDER (HIGH-LEVEL MOCKED) TESTS + //////////////////////////////////////////////////////// + + function test_proveL2MessageInclusion_old( + uint256 mockChainId, + uint256 mockBatchNumber, + uint256 mockIndex, + bytes32[] memory mockProof, + uint16 randomTxNumInBatch, + address randomSender, + bytes memory randomData + ) public { + vm.startPrank(bridgeOwner); + bridgeHub.addChainTypeManager(address(mockCTM)); + vm.stopPrank(); + + L2Message memory l2Message = _createMockL2Message(randomTxNumInBatch, randomSender, randomData); + + vm.mockCall( + address(bridgeHub), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + bridgeHub.proveL2MessageInclusion.selector, + mockChainId, + mockBatchNumber, + mockIndex, + l2Message, + mockProof + ), + abi.encode(true) + ); + + assertTrue( + bridgeHub.proveL2MessageInclusion({ + _chainId: mockChainId, + _batchNumber: mockBatchNumber, + _index: mockIndex, + _message: l2Message, + _proof: mockProof + }) + ); + } + + function test_proveL2LogInclusion_old( + uint256 mockChainId, + uint256 mockBatchNumber, + uint256 mockIndex, + bytes32[] memory mockProof, + uint8 randomL2ShardId, + bool randomIsService, + uint16 randomTxNumInBatch, + address randomSender, + bytes32 randomKey, + bytes32 randomValue + ) public { + vm.startPrank(bridgeOwner); + bridgeHub.addChainTypeManager(address(mockCTM)); + vm.stopPrank(); + + L2Log memory l2Log = _createMockL2Log({ + randomL2ShardId: randomL2ShardId, + randomIsService: randomIsService, + randomTxNumInBatch: randomTxNumInBatch, + randomSender: randomSender, + randomKey: randomKey, + randomValue: randomValue + }); + + vm.mockCall( + address(bridgeHub), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + bridgeHub.proveL2LogInclusion.selector, + mockChainId, + mockBatchNumber, + 
mockIndex, + l2Log, + mockProof + ), + abi.encode(true) + ); + + assertTrue( + bridgeHub.proveL2LogInclusion({ + _chainId: mockChainId, + _batchNumber: mockBatchNumber, + _index: mockIndex, + _log: l2Log, + _proof: mockProof + }) + ); + } + + function test_proveL1ToL2TransactionStatus_old( + uint256 randomChainId, + bytes32 randomL2TxHash, + uint256 randomL2BatchNumber, + uint256 randomL2MessageIndex, + uint16 randomL2TxNumberInBatch, + bytes32[] memory randomMerkleProof, + bool randomResultantBool + ) public { + vm.startPrank(bridgeOwner); + bridgeHub.addChainTypeManager(address(mockCTM)); + vm.stopPrank(); + + TxStatus txStatus; + + if (randomChainId % 2 == 0) { + txStatus = TxStatus.Failure; + } else { + txStatus = TxStatus.Success; + } + + vm.mockCall( + address(bridgeHub), + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + bridgeHub.proveL1ToL2TransactionStatus.selector, + randomChainId, + randomL2TxHash, + randomL2BatchNumber, + randomL2MessageIndex, + randomL2TxNumberInBatch, + randomMerkleProof, + txStatus + ), + abi.encode(randomResultantBool) + ); + + assertTrue( + bridgeHub.proveL1ToL2TransactionStatus({ + _chainId: randomChainId, + _l2TxHash: randomL2TxHash, + _l2BatchNumber: randomL2BatchNumber, + _l2MessageIndex: randomL2MessageIndex, + _l2TxNumberInBatch: randomL2TxNumberInBatch, + _merkleProof: randomMerkleProof, + _status: txStatus + }) == randomResultantBool + ); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/ClaimFailedDeposit.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/ClaimFailedDeposit.t.sol new file mode 100644 index 000000000..aecde91f8 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/ClaimFailedDeposit.t.sol @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {L1Erc20BridgeTest} from "./_L1Erc20Bridge_Shared.t.sol"; +import {StdStorage, stdStorage} from "forge-std/Test.sol"; 
+import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {EmptyDeposit} from "contracts/common/L1ContractErrors.sol"; + +contract ClaimFailedDepositTest is L1Erc20BridgeTest { + using stdStorage for StdStorage; + + event ClaimedFailedDeposit(address indexed to, address indexed l1Token, uint256 amount); + + function test_RevertWhen_ClaimAmountIsZero() public { + vm.expectRevert(EmptyDeposit.selector); + bytes32[] memory merkleProof; + + bridge.claimFailedDeposit({ + _depositSender: randomSigner, + _l1Token: address(token), + _l2TxHash: bytes32(""), + _l2BatchNumber: 0, + _l2MessageIndex: 0, + _l2TxNumberInBatch: 0, + _merkleProof: merkleProof + }); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Deposit.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Deposit.t.sol similarity index 69% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Deposit.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Deposit.t.sol index 67f16aab8..3e4d305f6 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Deposit.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Deposit.t.sol @@ -2,7 +2,10 @@ pragma solidity 0.8.24; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; import {L1Erc20BridgeTest} from "./_L1Erc20Bridge_Shared.t.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {EmptyDeposit, ValueMismatch, TokensWithFeesNotSupported} from "contracts/common/L1ContractErrors.sol"; contract DepositTest is L1Erc20BridgeTest { event DepositInitiated( @@ -14,7 +17,7 @@ contract DepositTest is L1Erc20BridgeTest { ); function test_RevertWhen_depositAmountIsZero() public { - vm.expectRevert(bytes("0T")); + vm.expectRevert(EmptyDeposit.selector); bridge.deposit({ _l2Receiver: randomSigner, _l1Token: address(token), @@ -26,7 +29,7 @@ contract 
DepositTest is L1Erc20BridgeTest { } function test_RevertWhen_legacyDepositAmountIsZero() public { - vm.expectRevert(bytes("0T")); + vm.expectRevert(EmptyDeposit.selector); bridge.deposit({ _l2Receiver: randomSigner, _l1Token: address(token), @@ -82,9 +85,14 @@ contract DepositTest is L1Erc20BridgeTest { function test_RevertWhen_depositTransferAmountIsDifferent() public { uint256 amount = 2; + vm.mockCall( + address(feeOnTransferToken), + abi.encodeWithSelector(IERC20.balanceOf.selector), + abi.encode(amount + 1) + ); vm.prank(alice); feeOnTransferToken.approve(address(bridge), amount); - vm.expectRevert(bytes("3T")); + vm.expectRevert(TokensWithFeesNotSupported.selector); vm.prank(alice); bridge.deposit({ _l2Receiver: randomSigner, @@ -97,9 +105,14 @@ contract DepositTest is L1Erc20BridgeTest { function test_RevertWhen_legacyDepositTransferAmountIsDifferent() public { uint256 amount = 4; + vm.mockCall( + address(feeOnTransferToken), + abi.encodeWithSelector(IERC20.balanceOf.selector), + abi.encode(amount + 1) + ); vm.prank(alice); feeOnTransferToken.approve(address(bridge), amount); - vm.expectRevert(bytes("3T")); + vm.expectRevert(TokensWithFeesNotSupported.selector); vm.prank(alice); bridge.deposit({ _l2Receiver: randomSigner, @@ -112,13 +125,30 @@ contract DepositTest is L1Erc20BridgeTest { function test_depositSuccessfully() public { uint256 amount = 8; + bytes32 l2TxHash = keccak256("txHash"); + + vm.mockCall( + sharedBridgeAddress, + abi.encodeWithSelector( + IL1AssetRouter.depositLegacyErc20Bridge.selector, + alice, + randomSigner, + address(token), + amount, + 0, + 0, + address(0) + ), + abi.encode(l2TxHash) + ); + vm.prank(alice); token.approve(address(bridge), amount); vm.prank(alice); // solhint-disable-next-line func-named-parameters vm.expectEmit(true, true, true, true, address(bridge)); // solhint-disable-next-line func-named-parameters - emit DepositInitiated(dummyL2DepositTxHash, alice, randomSigner, address(token), amount); + emit 
DepositInitiated(l2TxHash, alice, randomSigner, address(token), amount); bytes32 txHash = bridge.deposit({ _l2Receiver: randomSigner, _l1Token: address(token), @@ -127,24 +157,41 @@ contract DepositTest is L1Erc20BridgeTest { _l2TxGasPerPubdataByte: 0, _refundRecipient: address(0) }); - assertEq(txHash, dummyL2DepositTxHash); + assertEq(txHash, l2TxHash); - uint256 depositedAmount = bridge.depositAmount(alice, address(token), dummyL2DepositTxHash); + uint256 depositedAmount = bridge.depositAmount(alice, address(token), l2TxHash); assertEq(amount, depositedAmount); } function test_legacyDepositSuccessfully() public { - uint256 depositedAmountBefore = bridge.depositAmount(alice, address(token), dummyL2DepositTxHash); + uint256 amount = 8; + bytes32 l2TxHash = keccak256("txHash"); + + uint256 depositedAmountBefore = bridge.depositAmount(alice, address(token), l2TxHash); assertEq(depositedAmountBefore, 0); - uint256 amount = 8; + vm.mockCall( + sharedBridgeAddress, + abi.encodeWithSelector( + IL1AssetRouter.depositLegacyErc20Bridge.selector, + alice, + randomSigner, + address(token), + amount, + 0, + 0, + address(0) + ), + abi.encode(l2TxHash) + ); + vm.prank(alice); token.approve(address(bridge), amount); vm.prank(alice); // solhint-disable-next-line func-named-parameters vm.expectEmit(true, true, true, true, address(bridge)); // solhint-disable-next-line func-named-parameters - emit DepositInitiated(dummyL2DepositTxHash, alice, randomSigner, address(token), amount); + emit DepositInitiated(l2TxHash, alice, randomSigner, address(token), amount); bytes32 txHash = bridge.deposit({ _l2Receiver: randomSigner, _l1Token: address(token), @@ -152,9 +199,9 @@ contract DepositTest is L1Erc20BridgeTest { _l2TxGasLimit: 0, _l2TxGasPerPubdataByte: 0 }); - assertEq(txHash, dummyL2DepositTxHash); + assertEq(txHash, l2TxHash); - uint256 depositedAmount = bridge.depositAmount(alice, address(token), dummyL2DepositTxHash); + uint256 depositedAmount = bridge.depositAmount(alice, 
address(token), l2TxHash); assertEq(amount, depositedAmount); } } diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/FinalizeWithdrawal.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/FinalizeWithdrawal.sol similarity index 56% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/FinalizeWithdrawal.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/FinalizeWithdrawal.sol index 830c77953..e5a86bc2d 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/FinalizeWithdrawal.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/FinalizeWithdrawal.sol @@ -3,7 +3,12 @@ pragma solidity 0.8.24; import {L1Erc20BridgeTest} from "./_L1Erc20Bridge_Shared.t.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; import {StdStorage, stdStorage} from "forge-std/Test.sol"; +import {WithdrawalAlreadyFinalized} from "contracts/common/L1ContractErrors.sol"; +import {IL1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {FinalizeL1DepositParams} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {L2_ASSET_ROUTER_ADDR} from "contracts/common/L2ContractAddresses.sol"; contract FinalizeWithdrawalTest is L1Erc20BridgeTest { using stdStorage for StdStorage; @@ -22,7 +27,7 @@ contract FinalizeWithdrawalTest is L1Erc20BridgeTest { assertTrue(bridge.isWithdrawalFinalized(l2BatchNumber, l2MessageIndex)); - vm.expectRevert(bytes("pw")); + vm.expectRevert(WithdrawalAlreadyFinalized.selector); bytes32[] memory merkleProof; bridge.finalizeWithdrawal({ _l2BatchNumber: l2BatchNumber, @@ -36,17 +41,37 @@ contract FinalizeWithdrawalTest is L1Erc20BridgeTest { function test_finalizeWithdrawalSuccessfully() public { uint256 l2BatchNumber = 3; uint256 l2MessageIndex = 4; + uint256 txNumberInBatch = 0; + bytes32[] memory merkleProof; uint256 amount = 999; assertFalse(bridge.isWithdrawalFinalized(l2BatchNumber, 
l2MessageIndex)); - - dummySharedBridge.setDataToBeReturnedInFinalizeWithdrawal(alice, address(token), amount); + FinalizeL1DepositParams memory finalizeWithdrawalParams = FinalizeL1DepositParams({ + chainId: eraChainId, + l2BatchNumber: l2BatchNumber, + l2MessageIndex: l2MessageIndex, + l2Sender: L2_ASSET_ROUTER_ADDR, + l2TxNumberInBatch: uint16(txNumberInBatch), + message: "", + merkleProof: merkleProof + }); + vm.mockCall( + l1NullifierAddress, + abi.encodeWithSelector(IL1Nullifier.finalizeDeposit.selector, finalizeWithdrawalParams), + abi.encode(alice, address(token), amount) + ); + address l2BridgeAddress = address(12); + vm.mockCall( + l1NullifierAddress, + abi.encodeWithSelector(IL1Nullifier.l2BridgeAddress.selector, eraChainId), + abi.encode(l2BridgeAddress) + ); vm.prank(alice); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(bridge)); - emit WithdrawalFinalized(alice, address(token), amount); - bytes32[] memory merkleProof; + // vm.expectEmit(true, true, true, true, address(bridge)); + // emit WithdrawalFinalized(alice, address(token), amount); + bridge.finalizeWithdrawal({ _l2BatchNumber: l2BatchNumber, _l2MessageIndex: l2MessageIndex, diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Getters.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Getters.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Getters.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Getters.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Initialization.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Initialization.t.sol similarity index 70% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Initialization.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Initialization.t.sol index 
281ec169a..d3e5c9357 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Initialization.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Initialization.t.sol @@ -3,10 +3,11 @@ pragma solidity 0.8.24; import {L1Erc20BridgeTest} from "./_L1Erc20Bridge_Shared.t.sol"; +import {SlotOccupied} from "contracts/common/L1ContractErrors.sol"; contract InitializationTest is L1Erc20BridgeTest { function test_RevertWhen_DoubleInitialization() public { - vm.expectRevert(bytes("1B")); + vm.expectRevert(SlotOccupied.selector); bridge.initialize(); } } diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Reentrancy.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Reentrancy.t.sol similarity index 94% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Reentrancy.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Reentrancy.t.sol index 7a6183f93..528239434 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/Reentrancy.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/Reentrancy.t.sol @@ -5,6 +5,7 @@ pragma solidity 0.8.24; import {StdStorage, stdStorage} from "forge-std/Test.sol"; import {L1Erc20BridgeTest} from "./_L1Erc20Bridge_Shared.t.sol"; import {ReenterL1ERC20Bridge} from "contracts/dev-contracts/test/ReenterL1ERC20Bridge.sol"; +import {SlotOccupied, Reentrancy} from "contracts/common/L1ContractErrors.sol"; contract ReentrancyTest is L1Erc20BridgeTest { using stdStorage for StdStorage; @@ -15,7 +16,7 @@ contract ReentrancyTest is L1Erc20BridgeTest { token.approve(address(bridgeReenterItself), amount); vm.prank(alice); - vm.expectRevert(bytes("r1")); + vm.expectRevert(Reentrancy.selector); bridgeReenterItself.deposit({ _l2Receiver: randomSigner, _l1Token: address(token), @@ -32,7 +33,7 @@ contract ReentrancyTest is L1Erc20BridgeTest { token.approve(address(bridgeReenterItself), 
amount); vm.prank(alice); - vm.expectRevert(bytes("r1")); + vm.expectRevert(Reentrancy.selector); bridgeReenterItself.deposit({ _l2Receiver: randomSigner, _l1Token: address(token), @@ -49,16 +50,16 @@ contract ReentrancyTest is L1Erc20BridgeTest { .sig("depositAmount(address,address,bytes32)") .with_key(alice) .with_key(address(token)) - .with_key(dummyL2DepositTxHash) + .with_key(bytes32("")) .checked_write(amount); vm.prank(alice); bytes32[] memory merkleProof; - vm.expectRevert(bytes("r1")); + vm.expectRevert(Reentrancy.selector); bridgeReenterItself.claimFailedDeposit({ _depositSender: alice, _l1Token: address(token), - _l2TxHash: dummyL2DepositTxHash, + _l2TxHash: bytes32(""), _l2BatchNumber: 0, _l2MessageIndex: 0, _l2TxNumberInBatch: 0, @@ -71,7 +72,7 @@ contract ReentrancyTest is L1Erc20BridgeTest { uint256 l2MessageIndex = 4; vm.prank(alice); - vm.expectRevert(bytes("r1")); + vm.expectRevert(Reentrancy.selector); bytes32[] memory merkleProof; bridgeReenterItself.finalizeWithdrawal({ _l2BatchNumber: l2BatchNumber, diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/_L1Erc20Bridge_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/_L1Erc20Bridge_Shared.t.sol similarity index 52% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/_L1Erc20Bridge_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/_L1Erc20Bridge_Shared.t.sol index 6add8395e..fb0c30c58 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/_L1Erc20Bridge_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1Erc20Bridge/_L1Erc20Bridge_Shared.t.sol @@ -1,20 +1,24 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8.17; +pragma solidity ^0.8.21; +import {StdStorage, stdStorage} from "forge-std/Test.sol"; import {Test} from "forge-std/Test.sol"; import {L1ERC20Bridge} from "contracts/bridge/L1ERC20Bridge.sol"; -import {IL1SharedBridge} from 
"contracts/bridge/interfaces/IL1SharedBridge.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; import {FeeOnTransferToken} from "contracts/dev-contracts/FeeOnTransferToken.sol"; -import {DummySharedBridge} from "contracts/dev-contracts/test/DummySharedBridge.sol"; import {ReenterL1ERC20Bridge} from "contracts/dev-contracts/test/ReenterL1ERC20Bridge.sol"; +import {DummySharedBridge} from "contracts/dev-contracts/test/DummySharedBridge.sol"; import {Utils} from "../../Utils/Utils.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {IL1Nullifier} from "contracts/bridge/L1Nullifier.sol"; contract L1Erc20BridgeTest is Test { L1ERC20Bridge internal bridge; - DummySharedBridge internal dummySharedBridge; ReenterL1ERC20Bridge internal reenterL1ERC20Bridge; L1ERC20Bridge internal bridgeReenterItself; @@ -23,18 +27,42 @@ contract L1Erc20BridgeTest is Test { TestnetERC20Token internal feeOnTransferToken; address internal randomSigner; address internal alice; + address sharedBridgeAddress; + address l1NullifierAddress; bytes32 internal dummyL2DepositTxHash; + uint256 eraChainId = 9; constructor() { randomSigner = makeAddr("randomSigner"); dummyL2DepositTxHash = Utils.randomBytes32("dummyL2DepositTxHash"); + sharedBridgeAddress = makeAddr("sharedBridgeAddress"); alice = makeAddr("alice"); + l1NullifierAddress = makeAddr("l1NullifierAddress"); + + bridge = new L1ERC20Bridge( + IL1Nullifier(l1NullifierAddress), + IL1AssetRouter(sharedBridgeAddress), + IL1NativeTokenVault(address(1)), + eraChainId + ); + + address weth = makeAddr("weth"); + L1NativeTokenVault ntv = new L1NativeTokenVault( + weth, + sharedBridgeAddress, + eraChainId, + IL1Nullifier(l1NullifierAddress) + ); - 
dummySharedBridge = new DummySharedBridge(dummyL2DepositTxHash); - bridge = new L1ERC20Bridge(IL1SharedBridge(address(dummySharedBridge))); + vm.store(address(bridge), bytes32(uint256(212)), bytes32(0)); reenterL1ERC20Bridge = new ReenterL1ERC20Bridge(); - bridgeReenterItself = new L1ERC20Bridge(IL1SharedBridge(address(reenterL1ERC20Bridge))); + bridgeReenterItself = new L1ERC20Bridge( + IL1Nullifier(address(reenterL1ERC20Bridge)), + IL1AssetRouter(address(reenterL1ERC20Bridge)), + ntv, + eraChainId + ); reenterL1ERC20Bridge.setBridge(bridgeReenterItself); token = new TestnetERC20Token("TestnetERC20Token", "TET", 18); diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeBase.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeBase.t.sol similarity index 55% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeBase.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeBase.t.sol index c16ab2898..dd3b8c145 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeBase.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeBase.t.sol @@ -1,86 +1,121 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -import {L1SharedBridgeTest} from "./_L1SharedBridge_Shared.t.sol"; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; + +import {L1AssetRouterTest} from "./_L1SharedBridge_Shared.t.sol"; import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; import {L2Message, TxStatus} from "contracts/common/Messaging.sol"; import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; -import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; -import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from 
"contracts/common/L2ContractAddresses.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {IL1AssetHandler} from "contracts/bridge/interfaces/IL1AssetHandler.sol"; +import {IL1BaseTokenAssetHandler} from "contracts/bridge/interfaces/IL1BaseTokenAssetHandler.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, L2_ASSET_ROUTER_ADDR} from "contracts/common/L2ContractAddresses.sol"; import {IGetters} from "contracts/state-transition/chain-interfaces/IGetters.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {StdStorage, stdStorage} from "forge-std/Test.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; + +contract L1AssetRouterTestBase is L1AssetRouterTest { + using stdStorage for StdStorage; + + function test_bridgehubPause() public { + vm.prank(owner); + sharedBridge.pause(); + assertEq(sharedBridge.paused(), true, "Shared Bridge Not Paused"); + } + + function test_bridgehubUnpause() public { + vm.prank(owner); + sharedBridge.pause(); + assertEq(sharedBridge.paused(), true, "Shared Bridge Not Paused"); + vm.prank(owner); + sharedBridge.unpause(); + assertEq(sharedBridge.paused(), false, "Shared Bridge Remains Paused"); + } -contract L1SharedBridgeTestBase is L1SharedBridgeTest { function test_bridgehubDepositBaseToken_Eth() public { - vm.deal(bridgehubAddress, amount); vm.prank(bridgehubAddress); // solhint-disable-next-line func-named-parameters vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit BridgehubDepositBaseTokenInitiated(chainId, alice, ETH_TOKEN_ADDRESS, amount); - sharedBridge.bridgehubDepositBaseToken{value: amount}(chainId, alice, ETH_TOKEN_ADDRESS, amount); + emit BridgehubDepositBaseTokenInitiated(chainId, alice, ETH_TOKEN_ASSET_ID, amount); + sharedBridge.bridgehubDepositBaseToken{value: amount}(chainId, ETH_TOKEN_ASSET_ID, alice, 
amount); } function test_bridgehubDepositBaseToken_Erc() public { - token.mint(alice, amount); + vm.prank(bridgehubAddress); + // solhint-disable-next-line func-named-parameters + vm.expectEmit(true, true, true, true, address(sharedBridge)); + emit BridgehubDepositBaseTokenInitiated(chainId, alice, tokenAssetId, amount); + sharedBridge.bridgehubDepositBaseToken(chainId, tokenAssetId, alice, amount); + } + + function test_bridgehubDepositBaseToken_Erc_NoApproval() public { vm.prank(alice); - token.approve(address(sharedBridge), amount); + token.approve(address(nativeTokenVault), 0); vm.prank(bridgehubAddress); // solhint-disable-next-line func-named-parameters vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit BridgehubDepositBaseTokenInitiated(chainId, alice, address(token), amount); - sharedBridge.bridgehubDepositBaseToken(chainId, alice, address(token), amount); + emit BridgehubDepositBaseTokenInitiated(chainId, alice, tokenAssetId, amount); + sharedBridge.bridgehubDepositBaseToken(chainId, tokenAssetId, alice, amount); } function test_bridgehubDeposit_Eth() public { - vm.deal(bridgehubAddress, amount); - vm.prank(bridgehubAddress); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); + _setBaseTokenAssetId(tokenAssetId); + bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); + bytes memory mintCalldata = abi.encode( + alice, + bob, + address(ETH_TOKEN_ADDRESS), + amount, + nativeTokenVault.getERC20Getters(address(ETH_TOKEN_ADDRESS), chainId) + ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + vm.prank(bridgehubAddress); emit BridgehubDepositInitiated({ chainId: chainId, txDataHash: txDataHash, from: alice, - to: zkSync, - l1Token: ETH_TOKEN_ADDRESS, - amount: amount + assetId: ETH_TOKEN_ASSET_ID, + 
bridgeMintCalldata: mintCalldata }); - sharedBridge.bridgehubDeposit{value: amount}(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, 0, bob)); + sharedBridge.bridgehubDeposit{value: amount}(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, amount, bob)); } function test_bridgehubDeposit_Erc() public { - token.mint(alice, amount); - vm.prank(alice); - token.approve(address(sharedBridge), amount); vm.prank(bridgehubAddress); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); + vm.expectEmit(true, true, true, false, address(sharedBridge)); bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); emit BridgehubDepositInitiated({ chainId: chainId, txDataHash: txDataHash, from: alice, - to: zkSync, - l1Token: address(token), - amount: amount + assetId: tokenAssetId, + bridgeMintCalldata: abi.encode(amount, bob) }); sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(address(token), amount, bob)); } + function test_bridgehubDeposit_Erc_CustomAssetHandler() public { + // ToDo: remove the mock call and register custom asset handler? 
+ vm.mockCall( + address(nativeTokenVault), + abi.encodeWithSelector(IL1BaseTokenAssetHandler.tokenAddress.selector, tokenAssetId), + abi.encode(address(0)) + ); + vm.prank(bridgehubAddress); + sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(address(token), amount, bob)); + } + function test_bridgehubConfirmL2Transaction() public { // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); + vm.expectEmit(true, true, true, false, address(l1Nullifier)); bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); emit BridgehubDepositFinalized(chainId, txDataHash, txHash); vm.prank(bridgehubAddress); @@ -88,11 +123,9 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { } function test_claimFailedDeposit_Erc() public { - token.mint(address(sharedBridge), amount); bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); - require(sharedBridge.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); - _setSharedBridgeChainBalance(chainId, address(token), amount); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); vm.mockCall( bridgehubAddress, @@ -111,9 +144,13 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit ClaimedFailedDepositSharedBridge({chainId: chainId, to: alice, l1Token: address(token), amount: amount}); - sharedBridge.claimFailedDeposit({ + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit ClaimedFailedDepositAssetRouter({ + chainId: chainId, + assetId: tokenAssetId, + assetData: abi.encode(bytes32(0)) + }); + l1Nullifier.claimFailedDeposit({ _chainId: chainId, _depositSender: alice, _l1Token: address(token), @@ -127,12 +164,9 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { } 
function test_claimFailedDeposit_Eth() public { - vm.deal(address(sharedBridge), amount); - bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); - require(sharedBridge.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); - _setSharedBridgeChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); vm.mockCall( bridgehubAddress, @@ -151,14 +185,13 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit ClaimedFailedDepositSharedBridge({ + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit ClaimedFailedDepositAssetRouter({ chainId: chainId, - to: alice, - l1Token: ETH_TOKEN_ADDRESS, - amount: amount + assetId: ETH_TOKEN_ASSET_ID, + assetData: abi.encode(bytes32(0)) }); - sharedBridge.claimFailedDeposit({ + l1Nullifier.claimFailedDeposit({ _chainId: chainId, _depositSender: alice, _l1Token: ETH_TOKEN_ADDRESS, @@ -171,69 +204,53 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { }); } - function test_finalizeWithdrawal_EthOnEth() public { - vm.deal(address(sharedBridge), amount); - - _setSharedBridgeChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - - bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); - L2Message memory l2ToL1Message = L2Message({ - txNumberInBatch: l2TxNumberInBatch, - sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, - data: message - }); + function test_bridgeRecoverFailedTransfer_Eth() public { + bytes memory transferData = abi.encode(amount, alice); + bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); + 
_setSharedBridgeDepositHappened(chainId, txHash, txDataHash); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); vm.mockCall( bridgehubAddress, // solhint-disable-next-line func-named-parameters abi.encodeWithSelector( - IBridgehub.proveL2MessageInclusion.selector, + IBridgehub.proveL1ToL2TransactionStatus.selector, chainId, + txHash, l2BatchNumber, l2MessageIndex, - l2ToL1Message, - merkleProof + l2TxNumberInBatch, + merkleProof, + TxStatus.Failure ), abi.encode(true) ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, ETH_TOKEN_ADDRESS, amount); - sharedBridge.finalizeWithdrawal({ + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit ClaimedFailedDepositAssetRouter({ + chainId: chainId, + assetId: ETH_TOKEN_ASSET_ID, + assetData: abi.encode(bytes32(0)) + }); + l1Nullifier.bridgeRecoverFailedTransfer({ _chainId: chainId, + _depositSender: alice, + _assetId: ETH_TOKEN_ASSET_ID, + _assetData: transferData, + _l2TxHash: txHash, _l2BatchNumber: l2BatchNumber, _l2MessageIndex: l2MessageIndex, _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, _merkleProof: merkleProof }); } - function test_finalizeWithdrawal_ErcOnEth() public { - token.mint(address(sharedBridge), amount); - - _setSharedBridgeChainBalance(chainId, address(token), amount); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - - bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount - ); + function test_finalizeWithdrawal_EthOnEth() public { + bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: l2SharedBridge, + sender: 
L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, data: message }); @@ -252,8 +269,8 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, address(token), amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, ETH_TOKEN_ASSET_ID, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -264,25 +281,17 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { }); } - function test_finalizeWithdrawal_EthOnErc() public { - vm.deal(address(sharedBridge), amount); - - _setSharedBridgeChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); - + function test_finalizeWithdrawal_ErcOnEth() public { + _setNativeTokenVaultChainBalance(chainId, address(token), amount); bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - ETH_TOKEN_ADDRESS, - amount + IAssetRouterBase.finalizeDeposit.selector, + chainId, + tokenAssetId, + abi.encode(0, alice, 0, amount, new bytes(0)) ); L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: l2SharedBridge, + sender: L2_ASSET_ROUTER_ADDR, data: message }); @@ -301,8 +310,8 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, ETH_TOKEN_ADDRESS, amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, tokenAssetId, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -313,25 +322,22 @@ contract 
L1SharedBridgeTestBase is L1SharedBridgeTest { }); } - function test_finalizeWithdrawal_BaseErcOnErc() public { - token.mint(address(sharedBridge), amount); + function test_finalizeWithdrawal_EthOnErc() public { + // vm.deal(address(sharedBridge), amount); - _setSharedBridgeChainBalance(chainId, address(token), amount); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); + // _setNativeTokenVaultChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); + _setBaseTokenAssetId(tokenAssetId); + vm.prank(bridgehubAddress); bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount + IAssetRouterBase.finalizeDeposit.selector, + chainId, + ETH_TOKEN_ASSET_ID, + abi.encode(0, alice, 0, amount, new bytes(0)) ); L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + sender: L2_ASSET_ROUTER_ADDR, data: message }); @@ -350,8 +356,8 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, address(token), amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, ETH_TOKEN_ASSET_ID, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -362,21 +368,19 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { }); } - function test_finalizeWithdrawal_NonBaseErcOnErc() public { - token.mint(address(sharedBridge), amount); - - _setSharedBridgeChainBalance(chainId, address(token), amount); + function test_finalizeWithdrawal_BaseErcOnErc() public { + _setBaseTokenAssetId(tokenAssetId); + vm.prank(bridgehubAddress); bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - 
address(token), - amount + IAssetRouterBase.finalizeDeposit.selector, + chainId, + tokenAssetId, + abi.encode(0, alice, 0, amount, new bytes(0)) ); - vm.mockCall(bridgehubAddress, abi.encodeWithSelector(IBridgehub.baseToken.selector), abi.encode(address(2))); //alt base token L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: l2SharedBridge, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, data: message }); @@ -385,18 +389,18 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { // solhint-disable-next-line func-named-parameters abi.encodeWithSelector( IBridgehub.proveL2MessageInclusion.selector, - chainId, - l2BatchNumber, - l2MessageIndex, - l2ToL1Message, - merkleProof + chainId + // l2BatchNumber, + // l2MessageIndex, + // l2ToL1Message, + // merkleProof ), abi.encode(true) ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, address(token), amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, tokenAssetId, abi.encode(amount, alice)); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -407,33 +411,22 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { }); } - function test_finalizeWithdrawal_EthOnEth_LegacyTx() public { - vm.deal(address(sharedBridge), amount); - uint256 legacyBatchNumber = 0; - - vm.mockCall( - l1ERC20BridgeAddress, - abi.encodeWithSelector(IL1ERC20Bridge.isWithdrawalFinalized.selector), - abi.encode(false) - ); - - vm.mockCall( - eraDiamondProxy, - abi.encodeWithSelector(IGetters.isEthWithdrawalFinalized.selector), - abi.encode(false) + function test_finalizeWithdrawal_NonBaseErcOnErc() public { + bytes memory message = abi.encodePacked( + IAssetRouterBase.finalizeDeposit.selector, + chainId, + tokenAssetId, + abi.encode(0, alice, 0, amount, new bytes(0)) ); - - 
_setSharedBridgeChainBalance(eraChainId, ETH_TOKEN_ADDRESS, amount); vm.mockCall( bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) + abi.encodeWithSelector(IBridgehub.baseTokenAssetId.selector), + abi.encode(bytes32(uint256(2))) ); - - bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); + //alt base token L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + sender: L2_ASSET_ROUTER_ADDR, data: message }); @@ -442,8 +435,8 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { // solhint-disable-next-line func-named-parameters abi.encodeWithSelector( IBridgehub.proveL2MessageInclusion.selector, - eraChainId, - legacyBatchNumber, + chainId, + l2BatchNumber, l2MessageIndex, l2ToL1Message, merkleProof @@ -452,15 +445,38 @@ contract L1SharedBridgeTestBase is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(eraChainId, alice, ETH_TOKEN_ADDRESS, amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, tokenAssetId, message); sharedBridge.finalizeWithdrawal({ - _chainId: eraChainId, - _l2BatchNumber: legacyBatchNumber, + _chainId: chainId, + _l2BatchNumber: l2BatchNumber, _l2MessageIndex: l2MessageIndex, _l2TxNumberInBatch: l2TxNumberInBatch, _message: message, _merkleProof: merkleProof }); } + + function test_safeTransferFundsFromSharedBridge_Erc() public { + bytes32 assetId = DataEncoding.encodeNTVAssetId(block.chainid, address(token)); + uint256 startBalanceNtv = nativeTokenVault.chainBalance(chainId, assetId); + // solhint-disable-next-line func-named-parameters + vm.expectEmit(true, true, false, true, address(token)); + emit IERC20.Transfer(address(l1Nullifier), address(nativeTokenVault), amount); + 
nativeTokenVault.transferFundsFromSharedBridge(address(token)); + nativeTokenVault.updateChainBalancesFromSharedBridge(address(token), chainId); + uint256 endBalanceNtv = nativeTokenVault.chainBalance(chainId, assetId); + assertEq(endBalanceNtv - startBalanceNtv, amount); + } + + function test_safeTransferFundsFromSharedBridge_Eth() public { + uint256 startEthBalanceNtv = address(nativeTokenVault).balance; + uint256 startBalanceNtv = nativeTokenVault.chainBalance(chainId, ETH_TOKEN_ASSET_ID); + nativeTokenVault.transferFundsFromSharedBridge(ETH_TOKEN_ADDRESS); + nativeTokenVault.updateChainBalancesFromSharedBridge(ETH_TOKEN_ADDRESS, chainId); + uint256 endBalanceNtv = nativeTokenVault.chainBalance(chainId, ETH_TOKEN_ASSET_ID); + uint256 endEthBalanceNtv = address(nativeTokenVault).balance; + assertEq(endBalanceNtv - startBalanceNtv, amount); + assertEq(endEthBalanceNtv - startEthBalanceNtv, amount); + } } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeFails.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeFails.t.sol new file mode 100644 index 000000000..0603a34dc --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeFails.t.sol @@ -0,0 +1,809 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import "forge-std/console.sol"; + +import {L1AssetRouterTest} from "./_L1SharedBridge_Shared.t.sol"; + +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; + +import {SET_ASSET_HANDLER_COUNTERPART_ENCODING_VERSION} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {ETH_TOKEN_ADDRESS} from 
"contracts/common/Config.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2Message, TxStatus} from "contracts/common/Messaging.sol"; +import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; +import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IGetters} from "contracts/state-transition/chain-interfaces/IGetters.sol"; +import {AddressAlreadyUsed, WithdrawFailed, Unauthorized, AssetIdNotSupported, SharedBridgeKey, SharedBridgeValueNotSet, L2WithdrawalMessageWrongLength, InsufficientChainBalance, ZeroAddress, ValueMismatch, NonEmptyMsgValue, DepositExists, ValueMismatch, NonEmptyMsgValue, TokenNotSupported, EmptyDeposit, L2BridgeNotDeployed, InvalidProof, NoFundsTransferred, InsufficientFunds, DepositDoesNotExist, WithdrawalAlreadyFinalized, InsufficientFunds, MalformedMessage, InvalidSelector, TokensWithFeesNotSupported} from "contracts/common/L1ContractErrors.sol"; +import {StdStorage, stdStorage} from "forge-std/Test.sol"; + +/// We are testing all the specified revert and require cases. 
+contract L1AssetRouterFailTest is L1AssetRouterTest { + using stdStorage for StdStorage; + + function test_initialize_wrongOwner() public { + vm.expectRevert(ZeroAddress.selector); + new TransparentUpgradeableProxy( + address(sharedBridgeImpl), + proxyAdmin, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + L1AssetRouter.initialize.selector, + address(0), + eraPostUpgradeFirstBatch, + eraPostUpgradeFirstBatch, + 1, + 0 + ) + ); + } + + function test_initialize_wrongOwnerNTV() public { + vm.expectRevert(abi.encodeWithSelector(ZeroAddress.selector)); + new TransparentUpgradeableProxy( + address(nativeTokenVaultImpl), + admin, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector(L1NativeTokenVault.initialize.selector, address(0), address(0)) + ); + } + + function test_transferTokenToNTV_wrongCaller() public { + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); + l1Nullifier.transferTokenToNTV(address(token)); + } + + function test_nullifyChainBalanceByNTV_wrongCaller() public { + vm.expectRevert("L1N: not NTV"); + l1Nullifier.nullifyChainBalanceByNTV(chainId, address(token)); + } + + function test_registerToken_noCode() public { + vm.expectRevert("NTV: empty token"); + nativeTokenVault.registerToken(address(0)); + } + + function test_setL1Erc20Bridge_alreadySet() public { + address currentBridge = address(sharedBridge.legacyBridge()); + vm.prank(owner); + vm.expectRevert(abi.encodeWithSelector(AddressAlreadyUsed.selector, currentBridge)); + sharedBridge.setL1Erc20Bridge(IL1ERC20Bridge(address(0))); + } + + function test_setL1Erc20Bridge_emptyAddressProvided() public { + stdstore.target(address(sharedBridge)).sig(sharedBridge.legacyBridge.selector).checked_write(address(0)); + vm.prank(owner); + vm.expectRevert(abi.encodeWithSelector(ZeroAddress.selector)); + sharedBridge.setL1Erc20Bridge(IL1ERC20Bridge(address(0))); + } + + function test_setNativeTokenVault_alreadySet() public { + 
vm.prank(owner); + vm.expectRevert("AR: native token v already set"); + sharedBridge.setNativeTokenVault(INativeTokenVault(address(0))); + } + + function test_setNativeTokenVault_emptyAddressProvided() public { + stdstore.target(address(sharedBridge)).sig(sharedBridge.nativeTokenVault.selector).checked_write(address(0)); + vm.prank(owner); + vm.expectRevert("AR: native token vault 0"); + sharedBridge.setNativeTokenVault(INativeTokenVault(address(0))); + } + + function test_setAssetHandlerAddressOnCounterpart_wrongCounterPartAddress() public { + bytes memory data = bytes.concat( + SET_ASSET_HANDLER_COUNTERPART_ENCODING_VERSION, + abi.encode(tokenAssetId, address(token)) + ); + + vm.prank(bridgehubAddress); + vm.expectRevert("NTV: wrong counterpart"); + sharedBridge.bridgehubDeposit(eraChainId, owner, 0, data); + } + + function test_transferFundsToSharedBridge_Eth_CallFailed() public { + vm.mockCallRevert(address(nativeTokenVault), "", "eth transfer failed"); + vm.prank(address(nativeTokenVault)); + vm.expectRevert("L1N: eth transfer failed"); + l1Nullifier.transferTokenToNTV(ETH_TOKEN_ADDRESS); + } + + function test_transferFundsToSharedBridge_Eth_0_AmountTransferred() public { + vm.deal(address(l1Nullifier), 0); + vm.prank(address(nativeTokenVault)); + vm.expectRevert(abi.encodeWithSelector(NoFundsTransferred.selector)); + nativeTokenVault.transferFundsFromSharedBridge(ETH_TOKEN_ADDRESS); + } + + function test_transferFundsToSharedBridge_Erc_0_AmountTransferred() public { + vm.prank(address(l1Nullifier)); + token.transfer(address(1), amount); + vm.prank(address(nativeTokenVault)); + vm.expectRevert("NTV: 0 amount to transfer"); + nativeTokenVault.transferFundsFromSharedBridge(address(token)); + } + + function test_transferFundsToSharedBridge_Erc_WrongAmountTransferred() public { + vm.mockCall(address(token), abi.encodeWithSelector(IERC20.balanceOf.selector), abi.encode(10)); + vm.prank(address(nativeTokenVault)); + vm.expectRevert("NTV: wrong amount transferred"); 
+ nativeTokenVault.transferFundsFromSharedBridge(address(token)); + } + + function test_bridgehubDepositBaseToken_Eth_Token_incorrectSender() public { + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); + sharedBridge.bridgehubDepositBaseToken{value: amount}(chainId, ETH_TOKEN_ASSET_ID, alice, amount); + } + + function test_bridgehubDepositBaseToken_EthwrongMsgValue() public { + vm.deal(bridgehubAddress, amount); + vm.prank(bridgehubAddress); + vm.expectRevert(abi.encodeWithSelector(ValueMismatch.selector, amount, uint256(1))); + sharedBridge.bridgehubDepositBaseToken{value: 1}(chainId, ETH_TOKEN_ASSET_ID, alice, amount); + } + + function test_bridgehubDepositBaseToken_ErcWrongMsgValue() public { + vm.deal(bridgehubAddress, amount); + token.mint(alice, amount); + vm.prank(alice); + token.approve(address(sharedBridge), amount); + vm.prank(bridgehubAddress); + vm.expectRevert(NonEmptyMsgValue.selector); + sharedBridge.bridgehubDepositBaseToken{value: amount}(chainId, tokenAssetId, alice, amount); + } + + function test_bridgehubDepositBaseToken_ercWrongErcDepositAmount() public { + vm.mockCall(address(token), abi.encodeWithSelector(IERC20.balanceOf.selector), abi.encode(10)); + + vm.prank(bridgehubAddress); + vm.expectRevert(TokensWithFeesNotSupported.selector); + sharedBridge.bridgehubDepositBaseToken(chainId, tokenAssetId, alice, amount); + } + + function test_bridgehubDeposit_Erc_weth() public { + vm.prank(bridgehubAddress); + vm.expectRevert(abi.encodeWithSelector(TokenNotSupported.selector, l1WethAddress)); + // solhint-disable-next-line func-named-parameters + sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(l1WethAddress, amount, bob)); + } + + function test_bridgehubDeposit_Eth_baseToken() public { + vm.prank(bridgehubAddress); + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.baseTokenAssetId.selector), + abi.encode(ETH_TOKEN_ASSET_ID) + ); + 
vm.expectRevert(abi.encodeWithSelector(AssetIdNotSupported.selector, ETH_TOKEN_ASSET_ID)); + // solhint-disable-next-line func-named-parameters + sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, 0, bob)); + } + + function test_bridgehubDeposit_Eth_wrongDepositAmount() public { + _setBaseTokenAssetId(tokenAssetId); + vm.prank(bridgehubAddress); + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.baseTokenAssetId.selector), + abi.encode(tokenAssetId) + ); + vm.expectRevert(abi.encodeWithSelector(ValueMismatch.selector, amount, 0)); + // solhint-disable-next-line func-named-parameters + sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, amount, bob)); + } + + function test_bridgehubDeposit_Erc_msgValue() public { + vm.prank(bridgehubAddress); + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.baseTokenAssetId.selector), + abi.encode(ETH_TOKEN_ASSET_ID) + ); + vm.expectRevert(NonEmptyMsgValue.selector); + // solhint-disable-next-line func-named-parameters + sharedBridge.bridgehubDeposit{value: amount}(chainId, alice, 0, abi.encode(address(token), amount, bob)); + } + + function test_bridgehubDeposit_Erc_wrongDepositAmount() public { + vm.prank(bridgehubAddress); + vm.mockCall(address(token), abi.encodeWithSelector(IERC20.balanceOf.selector), abi.encode(10)); + vm.expectRevert(abi.encodeWithSelector(TokensWithFeesNotSupported.selector)); + // solhint-disable-next-line func-named-parameters + sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(address(token), amount, bob)); + } + + function test_bridgehubDeposit_Eth() public { + _setBaseTokenAssetId(tokenAssetId); + vm.prank(bridgehubAddress); + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.baseToken.selector), + abi.encode(address(token)) + ); + vm.expectRevert(EmptyDeposit.selector); + // solhint-disable-next-line func-named-parameters + sharedBridge.bridgehubDeposit(chainId, alice, 0, 
abi.encode(ETH_TOKEN_ADDRESS, 0, bob)); + } + + function test_bridgehubConfirmL2Transaction_depositAlreadyHappened() public { + bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); + _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); + vm.prank(bridgehubAddress); + vm.expectRevert(DepositExists.selector); + sharedBridge.bridgehubConfirmL2Transaction(chainId, txDataHash, txHash); + } + + function test_finalizeWithdrawal_EthOnEth_withdrawalFailed() public { + vm.deal(address(nativeTokenVault), 0); + bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); + L2Message memory l2ToL1Message = L2Message({ + txNumberInBatch: l2TxNumberInBatch, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + data: message + }); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL2MessageInclusion.selector, + chainId, + l2BatchNumber, + l2MessageIndex, + l2ToL1Message, + merkleProof + ), + abi.encode(true) + ); + + vm.expectRevert(abi.encodeWithSelector(WithdrawFailed.selector)); + sharedBridge.finalizeWithdrawal({ + _chainId: chainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_bridgeRecoverFailedTransfer_Eth_claimFailedDepositFailed() public { + vm.deal(address(nativeTokenVault), 0); + bytes memory transferData = abi.encode(amount, alice); + bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); + _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL1ToL2TransactionStatus.selector, + chainId, + txHash, + l2BatchNumber, + l2MessageIndex, + 
l2TxNumberInBatch, + merkleProof, + TxStatus.Failure + ), + abi.encode(true) + ); + + vm.expectRevert("NTV: claimFailedDeposit failed, no funds or cannot transfer to receiver"); + l1Nullifier.bridgeRecoverFailedTransfer({ + _chainId: chainId, + _depositSender: alice, + _assetId: ETH_TOKEN_ASSET_ID, + _assetData: transferData, + _l2TxHash: txHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + } + + function test_bridgeRecoverFailedTransfer_invalidChainID() public { + vm.store(address(l1Nullifier), bytes32(isWithdrawalFinalizedStorageLocation - 5), bytes32(uint256(0))); + + bytes memory transferData = abi.encode(amount, alice); + bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); + _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL1ToL2TransactionStatus.selector, + eraChainId, + txHash, + l2BatchNumber, + l2MessageIndex, + l2TxNumberInBatch, + merkleProof, + TxStatus.Failure + ), + abi.encode(true) + ); + + vm.expectRevert( + abi.encodeWithSelector(SharedBridgeValueNotSet.selector, SharedBridgeKey.LegacyBridgeLastDepositBatch) + ); + l1Nullifier.bridgeRecoverFailedTransfer({ + _chainId: eraChainId, + _depositSender: alice, + _assetId: ETH_TOKEN_ASSET_ID, + _assetData: transferData, + _l2TxHash: txHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + } + + function test_bridgeRecoverFailedTransfer_eraLegacyDeposit() public { + vm.store(address(l1Nullifier), bytes32(isWithdrawalFinalizedStorageLocation - 5), bytes32(uint256(2))); + + uint256 l2BatchNumber = 0; + bytes memory transferData = abi.encode(amount, alice); + 
bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); + _setSharedBridgeDepositHappened(eraChainId, txHash, txDataHash); + require(l1Nullifier.depositHappened(eraChainId, txHash) == txDataHash, "Deposit not set"); + console.log("txDataHash", uint256(txDataHash)); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL1ToL2TransactionStatus.selector, + eraChainId, + txHash, + l2BatchNumber, + l2MessageIndex, + l2TxNumberInBatch, + merkleProof, + TxStatus.Failure + ), + abi.encode(true) + ); + + vm.expectRevert(); + vm.mockCall( + address(bridgehubAddress), + abi.encodeWithSelector(IBridgehub.proveL1ToL2TransactionStatus.selector), + abi.encode(true) + ); + l1Nullifier.bridgeRecoverFailedTransfer({ + _chainId: eraChainId, + _depositSender: alice, + _assetId: ETH_TOKEN_ASSET_ID, + _assetData: transferData, + _l2TxHash: txHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + } + + function test_claimFailedDeposit_proofInvalid() public { + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.proveL1ToL2TransactionStatus.selector), + abi.encode(address(0)) + ); + vm.prank(bridgehubAddress); + vm.expectRevert(abi.encodeWithSelector(InvalidProof.selector)); + l1Nullifier.claimFailedDeposit({ + _chainId: chainId, + _depositSender: alice, + _l1Token: ETH_TOKEN_ADDRESS, + _amount: amount, + _l2TxHash: txHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + } + + function test_claimFailedDeposit_amountZero() public { + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL1ToL2TransactionStatus.selector, + chainId, + txHash, + l2BatchNumber, + l2MessageIndex, + l2TxNumberInBatch, + 
merkleProof, + TxStatus.Failure + ), + abi.encode(true) + ); + + bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, 0)); + _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); + vm.expectRevert(abi.encodeWithSelector((NoFundsTransferred.selector))); + l1Nullifier.claimFailedDeposit({ + _chainId: chainId, + _depositSender: alice, + _l1Token: ETH_TOKEN_ADDRESS, + _amount: 0, + _l2TxHash: txHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + } + + function test_claimFailedDeposit_depositDidNotHappen() public { + vm.deal(address(sharedBridge), amount); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL1ToL2TransactionStatus.selector, + chainId, + txHash, + l2BatchNumber, + l2MessageIndex, + l2TxNumberInBatch, + merkleProof, + TxStatus.Failure + ), + abi.encode(true) + ); + + vm.expectRevert(DepositDoesNotExist.selector); + l1Nullifier.claimFailedDeposit({ + _chainId: chainId, + _depositSender: alice, + _l1Token: ETH_TOKEN_ADDRESS, + _amount: amount, + _l2TxHash: txHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + } + + function test_claimFailedDeposit_chainBalanceLow() public { + _setNativeTokenVaultChainBalance(chainId, ETH_TOKEN_ADDRESS, 0); + + bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); + _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL1ToL2TransactionStatus.selector, + chainId, + txHash, + l2BatchNumber, + l2MessageIndex, + l2TxNumberInBatch, + merkleProof, + TxStatus.Failure + ), + 
abi.encode(true) + ); + + vm.expectRevert(InsufficientChainBalance.selector); + l1Nullifier.claimFailedDeposit({ + _chainId: chainId, + _depositSender: alice, + _l1Token: ETH_TOKEN_ADDRESS, + _amount: amount, + _l2TxHash: txHash, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _merkleProof: merkleProof + }); + } + + function test_finalizeWithdrawal_EthOnEth_legacyTxFinalizedInSharedBridge() public { + vm.deal(address(sharedBridge), amount); + vm.deal(address(nativeTokenVault), amount); + uint256 legacyBatchNumber = 0; + + vm.mockCall( + l1ERC20BridgeAddress, + abi.encodeWithSelector(IL1ERC20Bridge.isWithdrawalFinalized.selector), + abi.encode(false) + ); + + vm.store( + address(l1Nullifier), + keccak256( + abi.encode( + l2MessageIndex, + keccak256( + abi.encode( + legacyBatchNumber, + keccak256(abi.encode(eraChainId, isWithdrawalFinalizedStorageLocation)) + ) + ) + ) + ), + bytes32(uint256(1)) + ); + + bytes memory message = abi.encodePacked( + IL1ERC20Bridge.finalizeWithdrawal.selector, + alice, + address(token), + amount + ); + + vm.expectRevert(WithdrawalAlreadyFinalized.selector); + sharedBridge.finalizeWithdrawal({ + _chainId: eraChainId, + _l2BatchNumber: legacyBatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_finalizeWithdrawal_EthOnEth_diamondUpgradeFirstBatchNotSet() public { + vm.store(address(l1Nullifier), bytes32(isWithdrawalFinalizedStorageLocation - 7), bytes32(uint256(0))); + vm.deal(address(l1Nullifier), amount); + vm.deal(address(nativeTokenVault), amount); + + bytes memory message = abi.encodePacked( + IL1ERC20Bridge.finalizeWithdrawal.selector, + alice, + address(token), + amount + ); + vm.expectRevert(); + + sharedBridge.finalizeWithdrawal({ + _chainId: eraChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: 
l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_finalizeWithdrawal_TokenOnEth_legacyTokenWithdrawal() public { + vm.store(address(l1Nullifier), bytes32(isWithdrawalFinalizedStorageLocation - 6), bytes32(uint256(5))); + vm.deal(address(nativeTokenVault), amount); + + bytes memory message = abi.encodePacked( + IL1ERC20Bridge.finalizeWithdrawal.selector, + alice, + address(token), + amount + ); + vm.expectRevert(); + + sharedBridge.finalizeWithdrawal({ + _chainId: eraChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_finalizeWithdrawal_TokenOnEth_legacyUpgradeFirstBatchNotSet() public { + vm.store(address(l1Nullifier), bytes32(isWithdrawalFinalizedStorageLocation - 7), bytes32(uint256(0))); + vm.deal(address(nativeTokenVault), amount); + + bytes memory message = abi.encodePacked( + IL1ERC20Bridge.finalizeWithdrawal.selector, + alice, + address(token), + amount + ); + + vm.mockCall(bridgehubAddress, abi.encode(IBridgehub.proveL2MessageInclusion.selector), abi.encode(true)); + + vm.expectRevert( + abi.encodeWithSelector(SharedBridgeValueNotSet.selector, SharedBridgeKey.PostUpgradeFirstBatch) + ); + sharedBridge.finalizeWithdrawal({ + _chainId: eraChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_finalizeWithdrawal_chainBalance() public { + bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); + L2Message memory l2ToL1Message = L2Message({ + txNumberInBatch: l2TxNumberInBatch, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + data: message + }); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + 
IBridgehub.proveL2MessageInclusion.selector, + chainId, + l2BatchNumber, + l2MessageIndex, + l2ToL1Message, + merkleProof + ), + abi.encode(true) + ); + _setNativeTokenVaultChainBalance(chainId, ETH_TOKEN_ADDRESS, 1); + + vm.expectRevert(InsufficientChainBalance.selector); + sharedBridge.finalizeWithdrawal({ + _chainId: chainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_checkWithdrawal_wrongProof() public { + bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); + L2Message memory l2ToL1Message = L2Message({ + txNumberInBatch: l2TxNumberInBatch, + sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + data: message + }); + + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector( + IBridgehub.proveL2MessageInclusion.selector, + chainId, + l2BatchNumber, + l2MessageIndex, + l2ToL1Message, + merkleProof + ), + abi.encode(false) + ); + + vm.expectRevert(InvalidProof.selector); + sharedBridge.finalizeWithdrawal({ + _chainId: chainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_parseL2WithdrawalMessage_wrongMsgLength() public { + bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector); + + vm.expectRevert(abi.encodeWithSelector(L2WithdrawalMessageWrongLength.selector, message.length)); + sharedBridge.finalizeWithdrawal({ + _chainId: chainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_parseL2WithdrawalMessage_WrongMsgLength2() public { + vm.deal(address(sharedBridge), amount); + + vm.mockCall( + bridgehubAddress, + 
abi.encodeWithSelector(IBridgehub.baseToken.selector, alice, amount), + abi.encode(ETH_TOKEN_ADDRESS) + ); + + bytes memory message = abi.encodePacked(IL1ERC20Bridge.finalizeWithdrawal.selector, alice, amount); + // should have more data here + + vm.expectRevert(abi.encodeWithSelector(L2WithdrawalMessageWrongLength.selector, message.length)); + sharedBridge.finalizeWithdrawal({ + _chainId: chainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_parseL2WithdrawalMessage_wrongSelector() public { + // notice that the selector is wrong + bytes memory message = abi.encodePacked(IMailbox.proveL2LogInclusion.selector, alice, amount); + + vm.expectRevert(abi.encodeWithSelector(InvalidSelector.selector, IMailbox.proveL2LogInclusion.selector)); + sharedBridge.finalizeWithdrawal({ + _chainId: eraChainId, + _l2BatchNumber: l2BatchNumber, + _l2MessageIndex: l2MessageIndex, + _l2TxNumberInBatch: l2TxNumberInBatch, + _message: message, + _merkleProof: merkleProof + }); + } + + function test_depositLegacyERC20Bridge_weth() public { + uint256 l2TxGasLimit = 100000; + uint256 l2TxGasPerPubdataByte = 100; + address refundRecipient = address(0); + + vm.expectRevert(abi.encodeWithSelector(TokenNotSupported.selector, l1WethAddress)); + vm.prank(l1ERC20BridgeAddress); + sharedBridge.depositLegacyErc20Bridge({ + _originalCaller: alice, + _l2Receiver: bob, + _l1Token: l1WethAddress, + _amount: amount, + _l2TxGasLimit: l2TxGasLimit, + _l2TxGasPerPubdataByte: l2TxGasPerPubdataByte, + _refundRecipient: refundRecipient + }); + } + + function test_depositLegacyERC20Bridge_refundRecipient() public { + uint256 l2TxGasLimit = 100000; + uint256 l2TxGasPerPubdataByte = 100; + + // solhint-disable-next-line func-named-parameters + vm.expectEmit(true, true, true, true, address(sharedBridge)); + + emit LegacyDepositInitiated({ + chainId: eraChainId, + 
l2DepositTxHash: txHash, + from: alice, + to: bob, + l1Token: address(token), + amount: amount + }); + + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.requestL2TransactionDirect.selector), + abi.encode(txHash) + ); + + vm.prank(l1ERC20BridgeAddress); + sharedBridge.depositLegacyErc20Bridge({ + _originalCaller: alice, + _l2Receiver: bob, + _l1Token: address(token), + _amount: amount, + _l2TxGasLimit: l2TxGasLimit, + _l2TxGasPerPubdataByte: l2TxGasPerPubdataByte, + _refundRecipient: address(1) + }); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeHyperEnabled.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeHyperEnabled.t.sol similarity index 65% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeHyperEnabled.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeHyperEnabled.t.sol index b5e8e5467..6dc2da4e4 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeHyperEnabled.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeHyperEnabled.t.sol @@ -1,27 +1,28 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -import {L1SharedBridgeTest} from "./_L1SharedBridge_Shared.t.sol"; +import {L1AssetRouterTest} from "./_L1SharedBridge_Shared.t.sol"; import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; import {L2Message, TxStatus} from "contracts/common/Messaging.sol"; import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; -import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; -import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IAssetRouterBase} from 
"contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, L2_ASSET_ROUTER_ADDR} from "contracts/common/L2ContractAddresses.sol"; // note, this should be the same as where hyper is disabled -contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { +contract L1AssetRouterHyperEnabledTest is L1AssetRouterTest { function test_bridgehubDepositBaseToken_Eth() public { vm.deal(bridgehubAddress, amount); vm.prank(bridgehubAddress); // solhint-disable-next-line func-named-parameters vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit BridgehubDepositBaseTokenInitiated(chainId, alice, ETH_TOKEN_ADDRESS, amount); + emit BridgehubDepositBaseTokenInitiated(chainId, alice, ETH_TOKEN_ASSET_ID, amount); sharedBridge.bridgehubDepositBaseToken{value: amount}({ _chainId: chainId, - _prevMsgSender: alice, - _l1Token: ETH_TOKEN_ADDRESS, + _assetId: ETH_TOKEN_ASSET_ID, + _originalCaller: alice, _amount: amount }); } @@ -33,69 +34,57 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { vm.prank(bridgehubAddress); // solhint-disable-next-line func-named-parameters vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit BridgehubDepositBaseTokenInitiated(chainId, alice, address(token), amount); + emit BridgehubDepositBaseTokenInitiated(chainId, alice, tokenAssetId, amount); sharedBridge.bridgehubDepositBaseToken({ _chainId: chainId, - _prevMsgSender: alice, - _l1Token: address(token), + _assetId: tokenAssetId, + _originalCaller: alice, _amount: amount }); } function test_bridgehubDeposit_Eth() public { - vm.deal(bridgehubAddress, amount); - vm.prank(bridgehubAddress); + // vm.prank(bridgehubAddress); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); + vm.expectEmit(true, true, true, false, 
address(sharedBridge)); + _setBaseTokenAssetId(tokenAssetId); + vm.prank(bridgehubAddress); bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); emit BridgehubDepositInitiated({ chainId: chainId, txDataHash: txDataHash, from: alice, - to: zkSync, - l1Token: ETH_TOKEN_ADDRESS, - amount: amount + assetId: ETH_TOKEN_ASSET_ID, + bridgeMintCalldata: abi.encode(0, bob) }); sharedBridge.bridgehubDeposit{value: amount}({ _chainId: chainId, - _prevMsgSender: alice, - _l2Value: 0, - _data: abi.encode(ETH_TOKEN_ADDRESS, 0, bob) + _originalCaller: alice, + _value: amount, + _data: abi.encode(ETH_TOKEN_ADDRESS, amount, bob) }); } function test_bridgehubDeposit_Erc() public { - token.mint(alice, amount); - vm.prank(alice); - token.approve(address(sharedBridge), amount); vm.prank(bridgehubAddress); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + _setBaseTokenAssetId(ETH_TOKEN_ASSET_ID); + bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); emit BridgehubDepositInitiated({ chainId: chainId, txDataHash: txDataHash, from: alice, - to: zkSync, - l1Token: address(token), - amount: amount + assetId: tokenAssetId, + bridgeMintCalldata: abi.encode(amount, bob) }); sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(address(token), amount, bob)); } function test_bridgehubConfirmL2Transaction() public { // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); + vm.expectEmit(true, true, true, true, address(l1Nullifier)); bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); emit BridgehubDepositFinalized(chainId, txDataHash, txHash); vm.prank(bridgehubAddress); @@ -108,9 +97,9 @@ contract 
L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { // storing depositHappened[chainId][l2TxHash] = txDataHash. bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); - require(sharedBridge.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); - _setSharedBridgeChainBalance(chainId, address(token), amount); + _setNativeTokenVaultChainBalance(chainId, address(token), amount); vm.mockCall( bridgehubAddress, @@ -129,10 +118,10 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit ClaimedFailedDepositSharedBridge(chainId, alice, address(token), amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit ClaimedFailedDepositAssetRouter(chainId, tokenAssetId, abi.encode(bytes32(0))); vm.prank(bridgehubAddress); - sharedBridge.claimFailedDeposit({ + l1Nullifier.claimFailedDeposit({ _chainId: chainId, _depositSender: alice, _l1Token: address(token), @@ -146,19 +135,14 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { } function test_claimFailedDeposit_Eth() public { - vm.deal(address(sharedBridge), amount); - // storing depositHappened[chainId][l2TxHash] = txDataHash. 
bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); - require(sharedBridge.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); - - /// storing chainBalance - _setSharedBridgeChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); + require(l1Nullifier.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); // Bridgehub bridgehub = new Bridgehub(); // vm.store(address(bridgehub), bytes32(uint256(5 +2)), bytes32(uint256(31337))); - // require(address(bridgehub.deployer()) == address(31337), "Bridgehub: deployer wrong"); + // require(address(bridgehub.deployer()) == address(31337), "BH: deployer wrong"); vm.mockCall( bridgehubAddress, @@ -177,10 +161,10 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit ClaimedFailedDepositSharedBridge(chainId, alice, ETH_TOKEN_ADDRESS, amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit ClaimedFailedDepositAssetRouter(chainId, ETH_TOKEN_ASSET_ID, abi.encode(bytes32(0))); vm.prank(bridgehubAddress); - sharedBridge.claimFailedDeposit({ + l1Nullifier.claimFailedDeposit({ _chainId: chainId, _depositSender: alice, _l1Token: ETH_TOKEN_ADDRESS, @@ -194,15 +178,7 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { } function test_finalizeWithdrawal_EthOnEth() public { - vm.deal(address(sharedBridge), amount); - - /// storing chainBalance - _setSharedBridgeChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); + _setBaseTokenAssetId(ETH_TOKEN_ASSET_ID); bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); L2Message memory l2ToL1Message = L2Message({ @@ -226,8 +202,8 @@ contract 
L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, ETH_TOKEN_ADDRESS, amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, ETH_TOKEN_ASSET_ID, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -239,26 +215,17 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { } function test_finalizeWithdrawal_ErcOnEth() public { - token.mint(address(sharedBridge), amount); - - /// storing chainBalance - _setSharedBridgeChainBalance(chainId, address(token), amount); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); + _setBaseTokenAssetId(ETH_TOKEN_ASSET_ID); bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount + IAssetRouterBase.finalizeDeposit.selector, + chainId, + tokenAssetId, + abi.encode(0, alice, 0, amount, new bytes(0)) ); L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: l2SharedBridge, + sender: L2_ASSET_ROUTER_ADDR, data: message }); @@ -277,8 +244,8 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, address(token), amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, tokenAssetId, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -290,25 +257,17 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { } function test_finalizeWithdrawal_EthOnErc() public { - 
vm.deal(address(sharedBridge), amount); - - /// storing chainBalance - _setSharedBridgeChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); + _setBaseTokenAssetId(tokenAssetId); bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - ETH_TOKEN_ADDRESS, - amount + IAssetRouterBase.finalizeDeposit.selector, + chainId, + ETH_TOKEN_ASSET_ID, + abi.encode(0, alice, 0, amount, new bytes(0)) ); L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: l2SharedBridge, + sender: L2_ASSET_ROUTER_ADDR, data: message }); @@ -327,8 +286,8 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, ETH_TOKEN_ADDRESS, amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, ETH_TOKEN_ASSET_ID, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -340,22 +299,13 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { } function test_finalizeWithdrawal_BaseErcOnErc() public { - token.mint(address(sharedBridge), amount); - - /// storing chainBalance - _setSharedBridgeChainBalance(chainId, address(token), amount); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); + _setBaseTokenAssetId(tokenAssetId); bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount + IAssetRouterBase.finalizeDeposit.selector, + chainId, + tokenAssetId, + abi.encode(0, alice, 0, amount, new bytes(0)) ); L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, 
@@ -368,18 +318,18 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { // solhint-disable-next-line func-named-parameters abi.encodeWithSelector( IBridgehub.proveL2MessageInclusion.selector, - chainId, - l2BatchNumber, - l2MessageIndex, - l2ToL1Message, - merkleProof + chainId + // l2BatchNumber, + // l2MessageIndex, + // l2ToL1Message, + // merkleProof ), abi.encode(true) ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(chainId, alice, address(token), amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, tokenAssetId, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, @@ -390,22 +340,17 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { }); } - function test_finalizeWithdrawal_NonBaseErcOnErc() public { - token.mint(address(sharedBridge), amount); - - /// storing chainBalance - _setSharedBridgeChainBalance(chainId, address(token), amount); - + function test_finalizeWithdrawal_NonBaseErcOnErc2() public { bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount + IAssetRouterBase.finalizeDeposit.selector, + chainId, + tokenAssetId, + abi.encode(0, alice, 0, amount, new bytes(0)) ); - vm.mockCall(bridgehubAddress, abi.encodeWithSelector(IBridgehub.baseToken.selector), abi.encode(address(2))); //alt base token + _setBaseTokenAssetId(bytes32(uint256(2))); //alt base token L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: l2SharedBridge, + sender: L2_ASSET_ROUTER_ADDR, data: message }); @@ -424,8 +369,8 @@ contract L1SharedBridgeHyperEnabledTest is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit 
WithdrawalFinalizedSharedBridge(chainId, alice, address(token), amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(chainId, tokenAssetId, message); sharedBridge.finalizeWithdrawal({ _chainId: chainId, _l2BatchNumber: l2BatchNumber, diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeLegacy.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeLegacy.t.sol similarity index 52% rename from l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeLegacy.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeLegacy.t.sol index 83e83df9c..788446502 100644 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeLegacy.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeLegacy.t.sol @@ -1,16 +1,19 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -import {L1SharedBridgeTest} from "./_L1SharedBridge_Shared.t.sol"; +import "forge-std/console.sol"; + +import {L1AssetRouterTest} from "./_L1SharedBridge_Shared.t.sol"; import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; import {L2Message, TxStatus} from "contracts/common/Messaging.sol"; import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; -import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, L2_ASSET_ROUTER_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {FinalizeL1DepositParams} from "contracts/bridge/interfaces/IL1Nullifier.sol"; -contract L1SharedBridgeLegacyTest is L1SharedBridgeTest { +contract L1AssetRouterLegacyTest is L1AssetRouterTest { function test_depositLegacyERC20Bridge() 
public { uint256 l2TxGasLimit = 100000; uint256 l2TxGasPerPubdataByte = 100; @@ -36,7 +39,7 @@ contract L1SharedBridgeLegacyTest is L1SharedBridgeTest { vm.prank(l1ERC20BridgeAddress); sharedBridge.depositLegacyErc20Bridge({ - _prevMsgSender: alice, + _originalCaller: alice, _l2Receiver: bob, _l1Token: address(token), _amount: amount, @@ -50,7 +53,7 @@ contract L1SharedBridgeLegacyTest is L1SharedBridgeTest { vm.deal(address(sharedBridge), amount); /// storing chainBalance - _setSharedBridgeChainBalance(eraChainId, ETH_TOKEN_ADDRESS, amount); + _setNativeTokenVaultChainBalance(eraChainId, ETH_TOKEN_ADDRESS, amount); vm.mockCall( bridgehubAddress, abi.encodeWithSelector(IBridgehub.baseToken.selector), @@ -79,28 +82,24 @@ contract L1SharedBridgeLegacyTest is L1SharedBridgeTest { ); // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(eraChainId, alice, ETH_TOKEN_ADDRESS, amount); + vm.expectEmit(true, true, true, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(eraChainId, ETH_TOKEN_ASSET_ID, message); vm.prank(l1ERC20BridgeAddress); - sharedBridge.finalizeWithdrawalLegacyErc20Bridge({ - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof + FinalizeL1DepositParams memory finalizeWithdrawalParams = FinalizeL1DepositParams({ + chainId: eraChainId, + l2BatchNumber: l2BatchNumber, + l2MessageIndex: l2MessageIndex, + l2Sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, + l2TxNumberInBatch: l2TxNumberInBatch, + message: message, + merkleProof: merkleProof }); + l1Nullifier.finalizeDeposit(finalizeWithdrawalParams); } function test_finalizeWithdrawalLegacyErc20Bridge_ErcOnEth() public { - token.mint(address(sharedBridge), amount); - /// storing chainBalance - _setSharedBridgeChainBalance(eraChainId, address(token), amount); - vm.mockCall( - bridgehubAddress, - 
abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); + _setNativeTokenVaultChainBalance(eraChainId, address(token), amount); // solhint-disable-next-line func-named-parameters bytes memory message = abi.encodePacked( @@ -111,7 +110,7 @@ contract L1SharedBridgeLegacyTest is L1SharedBridgeTest { ); L2Message memory l2ToL1Message = L2Message({ txNumberInBatch: l2TxNumberInBatch, - sender: l2SharedBridge, + sender: L2_ASSET_ROUTER_ADDR, data: message }); @@ -128,64 +127,25 @@ contract L1SharedBridgeLegacyTest is L1SharedBridgeTest { ), abi.encode(true) ); - - // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit WithdrawalFinalizedSharedBridge(eraChainId, alice, address(token), amount); - vm.prank(l1ERC20BridgeAddress); - sharedBridge.finalizeWithdrawalLegacyErc20Bridge({ - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_claimFailedDepositLegacyErc20Bridge_Erc() public { - token.mint(address(sharedBridge), amount); - - // storing depositHappened[chainId][l2TxHash] = txDataHash. 
- bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); - _setSharedBridgeDepositHappened(eraChainId, txHash, txDataHash); - require(sharedBridge.depositHappened(eraChainId, txHash) == txDataHash, "Deposit not set"); - - _setSharedBridgeChainBalance(eraChainId, address(token), amount); - - // Bridgehub bridgehub = new Bridgehub(); - // vm.store(address(bridgehub), bytes32(uint256(5 +2)), bytes32(uint256(31337))); - // require(address(bridgehub.deployer()) == address(31337), "Bridgehub: deployer wrong"); - - vm.mockCall( - bridgehubAddress, - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - IBridgehub.proveL1ToL2TransactionStatus.selector, - eraChainId, - txHash, - l2BatchNumber, - l2MessageIndex, - l2TxNumberInBatch, - merkleProof, - TxStatus.Failure - ), - abi.encode(true) + // console.log(sharedBridge.) + vm.store( + address(sharedBridge), + keccak256(abi.encode(tokenAssetId, isWithdrawalFinalizedStorageLocation + 2)), + bytes32(uint256(uint160(address(nativeTokenVault)))) ); - // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - emit ClaimedFailedDepositSharedBridge(eraChainId, alice, address(token), amount); + vm.expectEmit(true, true, false, false, address(sharedBridge)); + emit DepositFinalizedAssetRouter(eraChainId, tokenAssetId, new bytes(0)); vm.prank(l1ERC20BridgeAddress); - - sharedBridge.claimFailedDepositLegacyErc20Bridge({ - _depositSender: alice, - _l1Token: address(token), - _amount: amount, - _l2TxHash: txHash, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _merkleProof: merkleProof + FinalizeL1DepositParams memory finalizeWithdrawalParams = FinalizeL1DepositParams({ + chainId: eraChainId, + l2BatchNumber: l2BatchNumber, + l2MessageIndex: l2MessageIndex, + l2Sender: L2_ASSET_ROUTER_ADDR, + l2TxNumberInBatch: l2TxNumberInBatch, + message: message, + merkleProof: merkleProof 
}); + l1Nullifier.finalizeDeposit(finalizeWithdrawalParams); } } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/_L1SharedBridge_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/_L1SharedBridge_Shared.t.sol new file mode 100644 index 000000000..de655e7ee --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Bridges/L1SharedBridge/_L1SharedBridge_Shared.t.sol @@ -0,0 +1,293 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {StdStorage, stdStorage} from "forge-std/Test.sol"; +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; + +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {IL1NativeTokenVault} from "contracts/bridge/ntv/IL1NativeTokenVault.sol"; +import {INativeTokenVault} from "contracts/bridge/ntv/INativeTokenVault.sol"; +import {IL1AssetHandler} from "contracts/bridge/interfaces/IL1AssetHandler.sol"; +import {IL1BaseTokenAssetHandler} from "contracts/bridge/interfaces/IL1BaseTokenAssetHandler.sol"; +import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {L2_NATIVE_TOKEN_VAULT_ADDR, L2_ASSET_ROUTER_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; + +contract L1AssetRouterTest is Test { + using stdStorage for StdStorage; + + event 
BridgehubDepositBaseTokenInitiated( + uint256 indexed chainId, + address indexed from, + bytes32 assetId, + uint256 amount + ); + + event BridgehubDepositInitiated( + uint256 indexed chainId, + bytes32 indexed txDataHash, + address indexed from, + bytes32 assetId, + bytes bridgeMintCalldata + ); + + event BridgehubDepositFinalized( + uint256 indexed chainId, + bytes32 indexed txDataHash, + bytes32 indexed l2DepositTxHash + ); + + event DepositFinalizedAssetRouter(uint256 indexed chainId, bytes32 indexed assetId, bytes assetData); + + event ClaimedFailedDepositAssetRouter(uint256 indexed chainId, bytes32 indexed assetId, bytes assetData); + + event LegacyDepositInitiated( + uint256 indexed chainId, + bytes32 indexed l2DepositTxHash, + address indexed from, + address to, + address l1Token, + uint256 amount + ); + + L1AssetRouter sharedBridgeImpl; + L1AssetRouter sharedBridge; + L1NativeTokenVault nativeTokenVaultImpl; + L1NativeTokenVault nativeTokenVault; + L1Nullifier l1NullifierImpl; + L1Nullifier l1Nullifier; + address bridgehubAddress; + address l1ERC20BridgeAddress; + address l1WethAddress; + address l2SharedBridge; + address l1NullifierAddress; + TestnetERC20Token token; + bytes32 tokenAssetId; + uint256 eraPostUpgradeFirstBatch; + + address owner; + address admin; + address proxyAdmin; + address zkSync; + address alice; + address bob; + uint256 chainId; + uint256 amount = 100; + uint256 mintValue = 1; + bytes32 txHash; + uint256 gas = 1_000_000; + + uint256 eraChainId; + uint256 randomChainId; + address eraDiamondProxy; + address eraErc20BridgeAddress; + + uint256 l2BatchNumber; + uint256 l2MessageIndex; + uint16 l2TxNumberInBatch; + bytes32[] merkleProof; + uint256 legacyBatchNumber = 0; + + uint256 isWithdrawalFinalizedStorageLocation = uint256(8 - 1 + (1 + 49) + 0 + (1 + 49) + 50 + 1 + 50); + bytes32 ETH_TOKEN_ASSET_ID = keccak256(abi.encode(block.chainid, L2_NATIVE_TOKEN_VAULT_ADDR, ETH_TOKEN_ADDRESS)); + + function setUp() public { + owner = 
makeAddr("owner"); + admin = makeAddr("admin"); + proxyAdmin = makeAddr("proxyAdmin"); + // zkSync = makeAddr("zkSync"); + bridgehubAddress = makeAddr("bridgehub"); + alice = makeAddr("alice"); + // bob = makeAddr("bob"); + l1WethAddress = makeAddr("weth"); + l1ERC20BridgeAddress = makeAddr("l1ERC20Bridge"); + l2SharedBridge = makeAddr("l2SharedBridge"); + + txHash = bytes32(uint256(uint160(makeAddr("txHash")))); + l2BatchNumber = 3; //uint256(uint160(makeAddr("l2BatchNumber"))); + l2MessageIndex = uint256(uint160(makeAddr("l2MessageIndex"))); + l2TxNumberInBatch = uint16(uint160(makeAddr("l2TxNumberInBatch"))); + merkleProof = new bytes32[](1); + eraPostUpgradeFirstBatch = 1; + + chainId = 1; + eraChainId = 9; + randomChainId = 999; + eraDiamondProxy = makeAddr("eraDiamondProxy"); + eraErc20BridgeAddress = makeAddr("eraErc20BridgeAddress"); + + token = new TestnetERC20Token("TestnetERC20Token", "TET", 18); + l1NullifierImpl = new L1Nullifier({ + _bridgehub: IBridgehub(bridgehubAddress), + _eraChainId: eraChainId, + _eraDiamondProxy: eraDiamondProxy + }); + TransparentUpgradeableProxy l1NullifierProxy = new TransparentUpgradeableProxy( + address(l1NullifierImpl), + proxyAdmin, + abi.encodeWithSelector(L1Nullifier.initialize.selector, owner, 1, 1, 1, 0) + ); + l1Nullifier = L1Nullifier(payable(l1NullifierProxy)); + sharedBridgeImpl = new L1AssetRouter({ + _l1WethAddress: l1WethAddress, + _bridgehub: bridgehubAddress, + _l1Nullifier: address(l1Nullifier), + _eraChainId: eraChainId, + _eraDiamondProxy: eraDiamondProxy + }); + TransparentUpgradeableProxy sharedBridgeProxy = new TransparentUpgradeableProxy( + address(sharedBridgeImpl), + proxyAdmin, + abi.encodeWithSelector(L1AssetRouter.initialize.selector, owner) + ); + sharedBridge = L1AssetRouter(payable(sharedBridgeProxy)); + nativeTokenVaultImpl = new L1NativeTokenVault({ + _l1WethAddress: l1WethAddress, + _l1AssetRouter: address(sharedBridge), + _eraChainId: eraChainId, + _l1Nullifier: l1Nullifier + }); + address 
tokenBeacon = makeAddr("tokenBeacon"); + TransparentUpgradeableProxy nativeTokenVaultProxy = new TransparentUpgradeableProxy( + address(nativeTokenVaultImpl), + proxyAdmin, + abi.encodeWithSelector(L1NativeTokenVault.initialize.selector, owner, tokenBeacon) + ); + nativeTokenVault = L1NativeTokenVault(payable(nativeTokenVaultProxy)); + + vm.prank(owner); + l1Nullifier.setL1AssetRouter(address(sharedBridge)); + vm.prank(owner); + l1Nullifier.setL1NativeTokenVault(nativeTokenVault); + vm.prank(owner); + l1Nullifier.setL1Erc20Bridge(IL1ERC20Bridge(l1ERC20BridgeAddress)); + vm.prank(owner); + sharedBridge.setL1Erc20Bridge(IL1ERC20Bridge(l1ERC20BridgeAddress)); + tokenAssetId = DataEncoding.encodeNTVAssetId(block.chainid, address(token)); + vm.prank(owner); + sharedBridge.setNativeTokenVault(INativeTokenVault(address(nativeTokenVault))); + vm.prank(address(nativeTokenVault)); + nativeTokenVault.registerToken(address(token)); + nativeTokenVault.registerEthToken(); + vm.prank(owner); + + vm.store( + address(sharedBridge), + bytes32(isWithdrawalFinalizedStorageLocation), + bytes32(eraPostUpgradeFirstBatch) + ); + vm.store( + address(sharedBridge), + bytes32(isWithdrawalFinalizedStorageLocation + 1), + bytes32(eraPostUpgradeFirstBatch) + ); + vm.store(address(sharedBridge), bytes32(isWithdrawalFinalizedStorageLocation + 2), bytes32(uint256(1))); + vm.store(address(sharedBridge), bytes32(isWithdrawalFinalizedStorageLocation + 3), bytes32(0)); + + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.baseTokenAssetId.selector), + abi.encode(ETH_TOKEN_ASSET_ID) + ); + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.baseTokenAssetId.selector, chainId), + abi.encode(ETH_TOKEN_ASSET_ID) + ); + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.requestL2TransactionDirect.selector), + abi.encode(txHash) + ); + + token.mint(address(nativeTokenVault), amount); + + /// storing chainBalance + 
_setNativeTokenVaultChainBalance(chainId, address(token), 1000 * amount); + _setNativeTokenVaultChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); + // console.log("chainBalance %s, %s", address(token), nativeTokenVault.chainBalance(chainId, address(token))); + _setSharedBridgeChainBalance(chainId, address(token), amount); + _setSharedBridgeChainBalance(chainId, ETH_TOKEN_ADDRESS, amount); + + vm.deal(bridgehubAddress, amount); + vm.deal(address(sharedBridge), amount); + vm.deal(address(l1Nullifier), amount); + vm.deal(address(nativeTokenVault), amount); + token.mint(alice, amount); + token.mint(address(sharedBridge), amount); + token.mint(address(nativeTokenVault), amount); + token.mint(address(l1Nullifier), amount); + vm.prank(alice); + token.approve(address(sharedBridge), amount); + vm.prank(alice); + token.approve(address(nativeTokenVault), amount); + vm.prank(alice); + token.approve(address(l1Nullifier), amount); + + _setBaseTokenAssetId(ETH_TOKEN_ASSET_ID); + _setNativeTokenVaultChainBalance(chainId, address(token), amount); + + vm.mockCall( + address(nativeTokenVault), + abi.encodeWithSelector(IL1BaseTokenAssetHandler.tokenAddress.selector, tokenAssetId), + abi.encode(address(token)) + ); + vm.mockCall( + address(nativeTokenVault), + abi.encodeWithSelector(IL1BaseTokenAssetHandler.tokenAddress.selector, ETH_TOKEN_ASSET_ID), + abi.encode(address(ETH_TOKEN_ADDRESS)) + ); + vm.mockCall( + bridgehubAddress, + // solhint-disable-next-line func-named-parameters + abi.encodeWithSelector(IBridgehub.baseToken.selector, chainId), + abi.encode(ETH_TOKEN_ADDRESS) + ); + } + + function _setSharedBridgeDepositHappened(uint256 _chainId, bytes32 _txHash, bytes32 _txDataHash) internal { + stdstore + .target(address(l1Nullifier)) + .sig(l1Nullifier.depositHappened.selector) + .with_key(_chainId) + .with_key(_txHash) + .checked_write(_txDataHash); + } + + function _setNativeTokenVaultChainBalance(uint256 _chainId, address _token, uint256 _value) internal { + bytes32 assetId = 
DataEncoding.encodeNTVAssetId(block.chainid, _token); + stdstore + .target(address(nativeTokenVault)) + .sig(nativeTokenVault.chainBalance.selector) + .with_key(_chainId) + .with_key(assetId) + .checked_write(_value); + } + + function _setSharedBridgeChainBalance(uint256 _chainId, address _token, uint256 _value) internal { + stdstore + .target(address(l1Nullifier)) + .sig(l1Nullifier.chainBalance.selector) + .with_key(_chainId) + .with_key(_token) + .checked_write(_value); + } + + function _setBaseTokenAssetId(bytes32 _assetId) internal { + // vm.prank(bridgehubAddress); + vm.mockCall( + bridgehubAddress, + abi.encodeWithSelector(IBridgehub.baseTokenAssetId.selector, chainId), + abi.encode(_assetId) + ); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/DiamondCut/FacetCut.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/FacetCut.t.sol similarity index 91% rename from l1-contracts/test/foundry/unit/concrete/DiamondCut/FacetCut.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/FacetCut.t.sol index adceecddb..2dd88c178 100644 --- a/l1-contracts/test/foundry/unit/concrete/DiamondCut/FacetCut.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/FacetCut.t.sol @@ -9,6 +9,7 @@ import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Execut import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {ReplaceFunctionFacetAddressZero, RemoveFunctionFacetAddressNotZero, FacetExists, SelectorsMustAllHaveSameFreezability, AddressHasNoCode, NonZeroAddress, ZeroAddress} from "contracts/common/L1ContractErrors.sol"; contract FacetCutTest is DiamondCutTest { MailboxFacet private mailboxFacet; @@ -19,17 +20,17 @@ contract FacetCutTest is DiamondCutTest { function getExecutorSelectors() private view returns (bytes4[] 
memory) { bytes4[] memory selectors = new bytes4[](4); - selectors[0] = executorFacet1.commitBatches.selector; - selectors[1] = executorFacet1.proveBatches.selector; - selectors[2] = executorFacet1.executeBatches.selector; - selectors[3] = executorFacet1.revertBatches.selector; + selectors[0] = executorFacet1.commitBatchesSharedBridge.selector; + selectors[1] = executorFacet1.proveBatchesSharedBridge.selector; + selectors[2] = executorFacet1.executeBatchesSharedBridge.selector; + selectors[3] = executorFacet1.revertBatchesSharedBridge.selector; return selectors; } function setUp() public { eraChainId = 9; diamondCutTestContract = new DiamondCutTestContract(); - mailboxFacet = new MailboxFacet(eraChainId); + mailboxFacet = new MailboxFacet(eraChainId, block.chainid); gettersFacet = new GettersFacet(); executorFacet1 = new ExecutorFacet(); executorFacet2 = new ExecutorFacet(); @@ -88,7 +89,9 @@ contract FacetCutTest is DiamondCutTest { diamondCutTestContract.diamondCut(diamondCutData); - vm.expectRevert(abi.encodePacked("J")); + vm.expectRevert( + abi.encodeWithSelector(FacetExists.selector, Utils.getMailboxSelectors()[0], address(mailboxFacet)) + ); diamondCutTestContract.diamondCut(diamondCutData); } @@ -107,7 +110,7 @@ contract FacetCutTest is DiamondCutTest { initCalldata: bytes("") }); - vm.expectRevert(abi.encodePacked("G")); + vm.expectRevert(abi.encodeWithSelector(AddressHasNoCode.selector, address(0))); diamondCutTestContract.diamondCut(diamondCutData); } @@ -126,7 +129,7 @@ contract FacetCutTest is DiamondCutTest { initCalldata: bytes("") }); - vm.expectRevert(abi.encodePacked("L")); + vm.expectRevert(ReplaceFunctionFacetAddressZero.selector); diamondCutTestContract.diamondCut(diamondCutData); } @@ -145,7 +148,7 @@ contract FacetCutTest is DiamondCutTest { initCalldata: bytes("") }); - vm.expectRevert(abi.encodePacked("a1")); + vm.expectRevert(abi.encodeWithSelector(RemoveFunctionFacetAddressNotZero.selector, address(mailboxFacet))); 
diamondCutTestContract.diamondCut(diamondCutData); } @@ -288,7 +291,7 @@ contract FacetCutTest is DiamondCutTest { initCalldata: bytes("") }); - vm.expectRevert(abi.encodePacked("G")); + vm.expectRevert(abi.encodeWithSelector(AddressHasNoCode.selector, address(1))); diamondCutTestContract.diamondCut(diamondCutData1); } @@ -310,7 +313,7 @@ contract FacetCutTest is DiamondCutTest { initCalldata: bytes("") }); - vm.expectRevert(abi.encodePacked("K")); + vm.expectRevert(abi.encodeWithSelector(AddressHasNoCode.selector, address(1))); diamondCutTestContract.diamondCut(diamondCutData1); } @@ -341,7 +344,7 @@ contract FacetCutTest is DiamondCutTest { initCalldata: bytes("") }); - vm.expectRevert(abi.encodePacked("J1")); + vm.expectRevert(SelectorsMustAllHaveSameFreezability.selector); diamondCutTestContract.diamondCut(diamondCutData); } @@ -377,7 +380,7 @@ contract FacetCutTest is DiamondCutTest { initCalldata: bytes("") }); - vm.expectRevert(abi.encodePacked("J1")); + vm.expectRevert(SelectorsMustAllHaveSameFreezability.selector); diamondCutTestContract.diamondCut(diamondCutData); } diff --git a/l1-contracts/test/foundry/unit/concrete/DiamondCut/Initialization.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/Initialization.t.sol similarity index 80% rename from l1-contracts/test/foundry/unit/concrete/DiamondCut/Initialization.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/Initialization.t.sol index cbcb012a5..94996c5e1 100644 --- a/l1-contracts/test/foundry/unit/concrete/DiamondCut/Initialization.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/Initialization.t.sol @@ -6,6 +6,7 @@ import {RevertFallback} from "contracts/dev-contracts/RevertFallback.sol"; import {ReturnSomething} from "contracts/dev-contracts/ReturnSomething.sol"; import {DiamondCutTestContract} from "contracts/dev-contracts/test/DiamondCutTestContract.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import 
{DelegateCallFailed, BadReturnData, MalformedCalldata, NonEmptyCalldata} from "contracts/common/L1ContractErrors.sol"; contract InitializationTest is DiamondCutTest { address private revertFallbackAddress; @@ -27,8 +28,8 @@ contract InitializationTest is DiamondCutTest { initAddress: revertFallbackAddress, initCalldata: bytes("") }); - - vm.expectRevert(abi.encodePacked("I")); + bytes memory emptyBytes; + vm.expectRevert(abi.encodeWithSelector(DelegateCallFailed.selector, emptyBytes)); diamondCutTestContract.diamondCut(diamondCutData); } @@ -40,8 +41,8 @@ contract InitializationTest is DiamondCutTest { initAddress: signerAddress, initCalldata: bytes("") }); - - vm.expectRevert(abi.encodePacked("lp")); + bytes memory emptyBytes; + vm.expectRevert(abi.encodeWithSelector(DelegateCallFailed.selector, emptyBytes)); diamondCutTestContract.diamondCut(diamondCutData); } @@ -54,7 +55,7 @@ contract InitializationTest is DiamondCutTest { initCalldata: bytes("0x11") }); - vm.expectRevert(abi.encodePacked("H")); + vm.expectRevert(NonEmptyCalldata.selector); diamondCutTestContract.diamondCut(diamondCutData); } @@ -66,8 +67,8 @@ contract InitializationTest is DiamondCutTest { initAddress: returnSomethingAddress, initCalldata: bytes("") }); - - vm.expectRevert(abi.encodePacked("lp1")); + bytes memory returnData = hex"0000000000000000000000000000000000000000000000000000000000000000"; + vm.expectRevert(abi.encodeWithSelector(DelegateCallFailed.selector, returnData)); diamondCutTestContract.diamondCut(diamondCutData); } } diff --git a/l1-contracts/test/foundry/unit/concrete/DiamondCut/UpgradeLogic.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/UpgradeLogic.t.sol similarity index 84% rename from l1-contracts/test/foundry/unit/concrete/DiamondCut/UpgradeLogic.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/UpgradeLogic.t.sol index 41c07d9a2..8823f20b9 100644 --- a/l1-contracts/test/foundry/unit/concrete/DiamondCut/UpgradeLogic.t.sol +++ 
b/l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/UpgradeLogic.t.sol @@ -6,14 +6,17 @@ import {DiamondCutTest} from "./_DiamondCut_Shared.t.sol"; import {DiamondCutTestContract} from "contracts/dev-contracts/test/DiamondCutTestContract.sol"; import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; -import {VerifierParams, FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; +import {VerifierParams, FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; import {IVerifier} from "contracts/state-transition/chain-interfaces/IVerifier.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {Utils} from "../Utils/Utils.sol"; import {InitializeData} from "contracts/state-transition/chain-deps/DiamondInit.sol"; -import {DummyStateTransitionManager} from "contracts/dev-contracts/test/DummyStateTransitionManager.sol"; +import {DummyChainTypeManager} from "contracts/dev-contracts/test/DummyChainTypeManager.sol"; +import {DummyBridgehub} from "contracts/dev-contracts/test/DummyBridgehub.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {DiamondAlreadyFrozen, Unauthorized, DiamondFreezeIncorrectState, DiamondNotFrozen} from "contracts/common/L1ContractErrors.sol"; contract UpgradeLogicTest is DiamondCutTest { DiamondProxy private diamondProxy; @@ -22,7 +25,7 @@ contract UpgradeLogicTest is DiamondCutTest { AdminFacet private proxyAsAdmin; GettersFacet private proxyAsGetters; address private admin; - address private stateTransitionManager; + address private chainTypeManager; address private randomSigner; function getAdminSelectors() 
private view returns (bytes4[] memory) { @@ -43,12 +46,13 @@ contract UpgradeLogicTest is DiamondCutTest { function setUp() public { admin = makeAddr("admin"); - stateTransitionManager = address(new DummyStateTransitionManager()); + chainTypeManager = address(new DummyChainTypeManager()); randomSigner = makeAddr("randomSigner"); + DummyBridgehub dummyBridgehub = new DummyBridgehub(); diamondCutTestContract = new DiamondCutTestContract(); diamondInit = new DiamondInit(); - adminFacet = new AdminFacet(); + adminFacet = new AdminFacet(block.chainid); gettersFacet = new GettersFacet(); Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](2); @@ -74,13 +78,12 @@ contract UpgradeLogicTest is DiamondCutTest { InitializeData memory params = InitializeData({ // TODO REVIEW chainId: 1, - bridgehub: makeAddr("bridgehub"), - stateTransitionManager: stateTransitionManager, + bridgehub: address(dummyBridgehub), + chainTypeManager: chainTypeManager, protocolVersion: 0, admin: admin, validatorTimelock: makeAddr("validatorTimelock"), - baseToken: makeAddr("baseToken"), - baseTokenBridge: makeAddr("baseTokenBridge"), + baseTokenAssetId: DataEncoding.encodeNTVAssetId(1, (makeAddr("baseToken"))), storedBatchZero: bytes32(0), // genesisBatchHash: 0x02c775f0a90abf7a0e8043f2fdc38f0580ca9f9996a895d05a501bfeaa3b2e21, // genesisIndexRepeatedStorageChanges: 0, @@ -118,24 +121,23 @@ contract UpgradeLogicTest is DiamondCutTest { function test_RevertWhen_EmergencyFreezeWhenUnauthorizedGovernor() public { vm.startPrank(randomSigner); - - vm.expectRevert(abi.encodePacked("Hyperchain: not state transition manager")); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomSigner)); proxyAsAdmin.freezeDiamond(); } - function test_RevertWhen_DoubleFreezingBySTM() public { - vm.startPrank(stateTransitionManager); + function test_RevertWhen_DoubleFreezingByCTM() public { + vm.startPrank(chainTypeManager); proxyAsAdmin.freezeDiamond(); - vm.expectRevert(abi.encodePacked("a9")); + 
vm.expectRevert(DiamondAlreadyFrozen.selector); proxyAsAdmin.freezeDiamond(); } function test_RevertWhen_UnfreezingWhenNotFrozen() public { - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); - vm.expectRevert(abi.encodePacked("a7")); + vm.expectRevert(DiamondNotFrozen.selector); proxyAsAdmin.unfreezeDiamond(); } @@ -154,7 +156,7 @@ contract UpgradeLogicTest is DiamondCutTest { initCalldata: bytes("") }); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); proxyAsAdmin.executeUpgrade(diamondCutData); @@ -185,7 +187,7 @@ contract UpgradeLogicTest is DiamondCutTest { initCalldata: bytes("") }); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); proxyAsAdmin.executeUpgrade(diamondCutData); proxyAsAdmin.executeUpgrade(diamondCutData); diff --git a/l1-contracts/test/foundry/unit/concrete/DiamondCut/_DiamondCut_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/_DiamondCut_Shared.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/DiamondCut/_DiamondCut_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/DiamondCut/_DiamondCut_Shared.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/Executor/Authorization.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Authorization.t.sol similarity index 64% rename from l1-contracts/test/foundry/unit/concrete/Executor/Authorization.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Executor/Authorization.t.sol index 498fb21a2..59869620b 100644 --- a/l1-contracts/test/foundry/unit/concrete/Executor/Authorization.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Authorization.t.sol @@ -6,6 +6,7 @@ import {Utils} from "../Utils/Utils.sol"; import {ExecutorTest} from "./_Executor_Shared.t.sol"; import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; contract 
AuthorizationTest is ExecutorTest { IExecutor.StoredBatchInfo private storedBatchInfo; @@ -33,7 +34,7 @@ contract AuthorizationTest is ExecutorTest { bootloaderHeapInitialContentsHash: Utils.randomBytes32("bootloaderHeapInitialContentsHash"), eventsQueueStateHash: Utils.randomBytes32("eventsQueueStateHash"), systemLogs: bytes(""), - pubdataCommitments: bytes("") + operatorDAInput: bytes("") }); } @@ -43,8 +44,12 @@ contract AuthorizationTest is ExecutorTest { vm.prank(randomSigner); - vm.expectRevert(bytes.concat("Hyperchain: not validator")); - executor.commitBatches(storedBatchInfo, commitBatchInfoArray); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomSigner)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + storedBatchInfo, + commitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); } function test_RevertWhen_ProvingByUnauthorisedAddress() public { @@ -53,8 +58,13 @@ contract AuthorizationTest is ExecutorTest { vm.prank(owner); - vm.expectRevert(bytes.concat("Hyperchain: not validator")); - executor.proveBatches(storedBatchInfo, storedBatchInfoArray, proofInput); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, owner)); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + storedBatchInfo, + storedBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); } function test_RevertWhen_ExecutingByUnauthorizedAddress() public { @@ -63,7 +73,11 @@ contract AuthorizationTest is ExecutorTest { vm.prank(randomSigner); - vm.expectRevert(bytes.concat("Hyperchain: not validator")); - executor.executeBatches(storedBatchInfoArray); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomSigner)); + (uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = 
Utils.encodeExecuteBatchesData( + storedBatchInfoArray, + Utils.emptyData() + ); + executor.executeBatchesSharedBridge(uint256(0), executeBatchFrom, executeBatchTo, executeData); } } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Executor/Committing.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Committing.t.sol new file mode 100644 index 000000000..5c2d5b65a --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Committing.t.sol @@ -0,0 +1,958 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import "forge-std/console.sol"; +import {Vm} from "forge-std/Test.sol"; +import {Utils, L2_BOOTLOADER_ADDRESS, L2_SYSTEM_CONTEXT_ADDRESS, L2_TO_L1_MESSENGER} from "../Utils/Utils.sol"; +import {ExecutorTest, EMPTY_PREPUBLISHED_COMMITMENT, POINT_EVALUATION_PRECOMPILE_RESULT} from "./_Executor_Shared.t.sol"; + +import {IExecutor, TOTAL_BLOBS_IN_COMMITMENT} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {POINT_EVALUATION_PRECOMPILE_ADDR} from "contracts/common/Config.sol"; +import {L2_PUBDATA_CHUNK_PUBLISHER_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {BLS_MODULUS} from "da-contracts/DAUtils.sol"; +import {TimeNotReached, BatchNumberMismatch, PubdataCommitmentsTooBig, InvalidPubdataCommitmentsSize, PubdataCommitmentsEmpty, L2TimestampTooBig, EmptyBlobVersionHash, CanOnlyProcessOneBatch, TimestampError, LogAlreadyProcessed, InvalidLogSender, UnexpectedSystemLog, HashMismatch, BatchHashMismatch, ValueMismatch, MissingSystemLogs, InvalidPubdataLength, NonEmptyBlobVersionHash, BlobHashCommitmentError} from "contracts/common/L1ContractErrors.sol"; + +contract CommittingTest is ExecutorTest { + bytes32[] defaultBlobVersionedHashes; + bytes32 l2DAValidatorOutputHash; + bytes operatorDAInput; + bytes defaultBlobCommitment; + bytes16 defaultBlobOpeningPoint = 0x7142c5851421a2dc03dde0aabdb0ffdb; + 
bytes32 defaultBlobClaimedValue = 0x1e5eea3bbb85517461c1d1c7b84c7c2cec050662a5e81a71d5d7e2766eaff2f0; + + function setUp() public { + // the values below are taken from the actual blob used by Era + bytes1 source = bytes1(0x01); + defaultBlobCommitment = Utils.getDefaultBlobCommitment(); + + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 1; + bytes32[] memory blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes"); + + operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + source, + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + l2DAValidatorOutputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + defaultBlobVersionedHashes = new bytes32[](1); + defaultBlobVersionedHashes[0] = 0x01c024b4740620a5849f95930cefe298933bdf588123ea897cdf0f2462f6d2d5; + + bytes memory precompileInput = Utils.defaultPointEvaluationPrecompileInput(defaultBlobVersionedHashes[0]); + vm.mockCall(POINT_EVALUATION_PRECOMPILE_ADDR, precompileInput, POINT_EVALUATION_PRECOMPILE_RESULT); + } + + function test_RevertWhen_CommittingWithWrongLastCommittedBatchData() public { + IExecutor.CommitBatchInfo[] memory newCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + newCommitBatchInfoArray[0] = newCommitBatchInfo; + + IExecutor.StoredBatchInfo memory wrongGenesisStoredBatchInfo = genesisStoredBatchInfo; + wrongGenesisStoredBatchInfo.timestamp = 1000; + + vm.prank(validator); + + vm.expectRevert( + abi.encodeWithSelector( + BatchHashMismatch.selector, + keccak256(abi.encode(genesisStoredBatchInfo)), + keccak256(abi.encode(wrongGenesisStoredBatchInfo)) + ) + ); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory 
commitData) = Utils.encodeCommitBatchesData( + wrongGenesisStoredBatchInfo, + newCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithWrongOrderOfBatches() public { + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.batchNumber = 2; // wrong batch number + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(BatchNumberMismatch.selector, uint256(1), uint256(2))); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithWrongNewBatchTimestamp() public { + bytes32 wrongNewBatchTimestamp = Utils.randomBytes32("wrongNewBatchTimestamp"); + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + + wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + wrongNewBatchTimestamp + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + vm.expectRevert(TimestampError.selector); + (uint256 commitBatchFrom, 
uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithTooSmallNewBatchTimestamp() public { + uint256 wrongNewBatchTimestamp = 1; + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(1, 1) + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + wrongNewCommitBatchInfo.timestamp = uint64(wrongNewBatchTimestamp); + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + vm.expectRevert(abi.encodeWithSelector(TimeNotReached.selector, 1, 2)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingTooBigLastL2BatchTimestamp() public { + uint64 wrongNewBatchTimestamp = 0xffffffff; + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + 
Utils.packBatchTimestampAndBlockTimestamp(wrongNewBatchTimestamp, wrongNewBatchTimestamp) + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + wrongNewCommitBatchInfo.timestamp = wrongNewBatchTimestamp; + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + vm.expectRevert(abi.encodeWithSelector(L2TimestampTooBig.selector)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithWrongPreviousBatchHash() public { + bytes32 wrongPreviousBatchHash = Utils.randomBytes32("wrongPreviousBatchHash"); + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + wrongL2Logs[uint256(uint256(SystemLogKey.PREV_BATCH_HASH_KEY))] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PREV_BATCH_HASH_KEY), + wrongPreviousBatchHash + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + vm.expectRevert(abi.encodeWithSelector(HashMismatch.selector, wrongPreviousBatchHash, bytes32(0))); + (uint256 commitBatchFrom, uint256 
commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithoutProcessingSystemContextLog() public { + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + delete wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))]; + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + vm.expectRevert(abi.encodeWithSelector(MissingSystemLogs.selector, 127, 125)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithProcessingSystemContextLogTwice() public { + bytes[] memory l2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + + bytes memory wrongL2Logs = abi.encodePacked( + Utils.encodePacked(l2Logs), + // solhint-disable-next-line func-named-parameters + Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + bytes32("") + ) + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = wrongL2Logs; + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory 
wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + vm.expectRevert(abi.encodeWithSelector(LogAlreadyProcessed.selector, 1)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_UnexpectedL2ToL1Log() public { + address unexpectedAddress = address(0); + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( + true, + unexpectedAddress, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + bytes32("") + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + + vm.expectRevert( + abi.encodeWithSelector( + InvalidLogSender.selector, + address(0), + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY) + ) + ); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithWrongCanonicalTxHash() public { + bytes32 wrongChainedPriorityHash = Utils.randomBytes32("canonicalTxHash"); + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + 
wrongL2Logs[uint256(uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY))] = Utils.constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY), + wrongChainedPriorityHash + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.blobhashes(defaultBlobVersionedHashes); + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(HashMismatch.selector, wrongChainedPriorityHash, keccak256(""))); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithWrongNumberOfLayer1txs() public { + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + wrongL2Logs[uint256(uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY))] = Utils.constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY), + bytes32(bytes1(0x01)) + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); + wrongNewCommitBatchInfo.numberOfLayer1Txs = 2; + wrongNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.blobhashes(defaultBlobVersionedHashes); + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(ValueMismatch.selector, 
uint256(bytes32(bytes1(0x01))), uint256(2))); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_CommittingWithUnknownSystemLogKey() public { + bytes[] memory l2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + bytes memory wrongL2Logs = abi.encodePacked( + Utils.encodePacked(l2Logs), + // solhint-disable-next-line func-named-parameters + abi.encodePacked(bytes2(0x0001), bytes2(0x0000), L2_SYSTEM_CONTEXT_ADDRESS, uint256(119), bytes32("")) + ); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = abi.encodePacked(wrongL2Logs); + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(UnexpectedSystemLog.selector, uint256(119))); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_SystemLogIsFromIncorrectAddress() public { + bytes32[7] memory values = [ + bytes32(""), + bytes32(""), + bytes32(""), + bytes32(""), + bytes32(""), + bytes32(""), + bytes32("") + ]; + + for (uint256 i = 0; i < values.length; i++) { + bytes[] memory wrongL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + address wrongAddress = makeAddr("randomAddress"); + wrongL2Logs[i] = Utils.constructL2Log(true, wrongAddress, i, values[i]); + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = 
Utils.encodePacked(wrongL2Logs); + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(InvalidLogSender.selector, wrongAddress, i)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + } + + function test_RevertWhen_SystemLogIsMissing() public { + for (uint256 i = 0; i < 7; i++) { + bytes[] memory l2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + delete l2Logs[i]; + + IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; + wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(l2Logs); + + IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; + + vm.prank(validator); + + uint256 allLogsProcessed = uint256(127); + vm.expectRevert(abi.encodeWithSelector(MissingSystemLogs.selector, 127, allLogsProcessed ^ (1 << i))); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + wrongNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + } + + function test_SuccessfullyCommitBatch() public { + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 1; + bytes32[] memory blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes"); + + operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, 
+ numberOfBlobs, + blobsLinearHashes, + bytes1(0x01), + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + l2DAValidatorOutputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + correctNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + bytes32[] memory blobHashes = new bytes32[](TOTAL_BLOBS_IN_COMMITMENT); + blobHashes[0] = blobsLinearHashes[0]; + + bytes32[] memory blobCommitments = new bytes32[](TOTAL_BLOBS_IN_COMMITMENT); + blobCommitments[0] = keccak256( + abi.encodePacked( + defaultBlobVersionedHashes[0], + abi.encodePacked(defaultBlobOpeningPoint, defaultBlobClaimedValue) + ) + ); + + bytes32 expectedBatchCommitment = Utils.createBatchCommitment( + correctNewCommitBatchInfo, + uncompressedStateDiffHash, + blobCommitments, + blobHashes + ); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + vm.recordLogs(); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, 
commitData); + + Vm.Log[] memory entries = vm.getRecordedLogs(); + + assertEq(entries.length, 1); + assertEq(entries[0].topics[0], keccak256("BlockCommit(uint256,bytes32,bytes32)")); + assertEq(entries[0].topics[1], bytes32(uint256(1))); // batchNumber + assertEq(entries[0].topics[2], correctNewCommitBatchInfo.newStateRoot); // batchHash + assertEq(entries[0].topics[3], expectedBatchCommitment); // commitment + + uint256 totalBatchesCommitted = getters.getTotalBatchesCommitted(); + assertEq(totalBatchesCommitted, 1); + } + + function test_SuccessfullyCommitBatchWithOneBlob() public { + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + correctNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + vm.recordLogs(); + + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + + Vm.Log[] memory entries = vm.getRecordedLogs(); + + assertEq(entries.length, 1); + assertEq(entries[0].topics[0], keccak256("BlockCommit(uint256,bytes32,bytes32)")); + assertEq(entries[0].topics[1], 
bytes32(uint256(1))); // batchNumber + + uint256 totalBatchesCommitted = getters.getTotalBatchesCommitted(); + assertEq(totalBatchesCommitted, 1); + + vm.clearMockedCalls(); + } + + function test_SuccessfullyCommitBatchWithTwoBlob() public { + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 2; + bytes32[] memory blobsLinearHashes = new bytes32[](2); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes1"); + blobsLinearHashes[1] = Utils.randomBytes32("blobsLinearHashes2"); + + bytes memory daInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + bytes1(0x01), + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT, + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + bytes32[] memory blobVersionedHashes = new bytes32[](2); + blobVersionedHashes[0] = defaultBlobVersionedHashes[0]; + blobVersionedHashes[1] = defaultBlobVersionedHashes[0]; + + bytes32 outputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + bytes[] memory correctL2Logs = Utils.createSystemLogs(outputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + correctNewCommitBatchInfo.operatorDAInput = daInput; + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + + 
vm.prank(validator); + vm.blobhashes(blobVersionedHashes); + + vm.recordLogs(); + + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + + Vm.Log[] memory entries = vm.getRecordedLogs(); + + assertEq(entries.length, 1); + assertEq(entries[0].topics[0], keccak256("BlockCommit(uint256,bytes32,bytes32)")); + assertEq(entries[0].topics[1], bytes32(uint256(1))); // batchNumber + + uint256 totalBatchesCommitted = getters.getTotalBatchesCommitted(); + assertEq(totalBatchesCommitted, 1); + + vm.clearMockedCalls(); + } + + function test_RevertWhen_CommittingBatchMoreThanOneBatch() public { + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](2); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[1] = correctNewCommitBatchInfo; + + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(CanOnlyProcessOneBatch.selector)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_EmptyPubdataCommitments() public { + bytes memory operatorDAInput = "\x01"; + + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory 
correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + vm.prank(validator); + + vm.expectRevert("too small"); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_PartialPubdataCommitment() public { + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + correctNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + bytes32[] memory blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes"); + + bytes memory daInput = abi.encodePacked( + Utils.randomBytes32("uncompressedStateDiffHash"), + Utils.randomBytes32("totalL2PubdataHash"), + uint8(1), + blobsLinearHashes, + bytes1(0x01), + bytes("") + ); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = daInput; + + vm.prank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + + 
vm.expectRevert(InvalidPubdataCommitmentsSize.selector); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_TooManyPubdataCommitments() public { + bytes32[] memory blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes"); + + bytes memory daInput = abi.encodePacked( + Utils.randomBytes32("uncompressedStateDiffHash"), + Utils.randomBytes32("totalL2PubdataHash"), + uint8(1), + blobsLinearHashes, + bytes1(0x01), + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT, + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = daInput; + + vm.prank(validator); + + vm.expectRevert(InvalidPubdataCommitmentsSize.selector); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function 
test_RevertWhen_NotEnoughPubdataCommitments() public { + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + bytes32[] memory versionedHashes = new bytes32[](2); + versionedHashes[0] = defaultBlobVersionedHashes[0]; + versionedHashes[1] = 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563; + + vm.prank(validator); + vm.blobhashes(versionedHashes); + + vm.expectRevert(abi.encodeWithSelector(NonEmptyBlobVersionHash.selector, uint256(1))); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + + vm.clearMockedCalls(); + } + + function test_RevertWhen_BlobDoesNotExist() public { + vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(bytes32(0))); + + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + 
IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(EmptyBlobVersionHash.selector, 0)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + + vm.clearMockedCalls(); + } + + function test_RevertWhen_SecondBlobSentWithoutCommitmentData() public { + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + bytes32[] memory blobVersionedHashes = new bytes32[](2); + blobVersionedHashes[0] = defaultBlobVersionedHashes[0]; + blobVersionedHashes[1] = defaultBlobVersionedHashes[0]; + + vm.prank(validator); + vm.blobhashes(blobVersionedHashes); + + vm.expectRevert(abi.encodeWithSelector(NonEmptyBlobVersionHash.selector, uint256(1))); + (uint256 commitBatchFrom, 
uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + + vm.clearMockedCalls(); + } + + function test_RevertWhen_SecondBlobLinearHashZeroWithCommitment() public { + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 2; + bytes32[] memory blobsLinearHashes = new bytes32[](2); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes1"); + blobsLinearHashes[1] = bytes32(0); + + bytes memory operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + bytes1(0x01), + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT, + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + bytes32[] memory blobVersionedHashes = new bytes32[](2); + blobVersionedHashes[0] = defaultBlobVersionedHashes[0]; + blobVersionedHashes[1] = defaultBlobVersionedHashes[0]; + + bytes32 outputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + bytes[] memory correctL2Logs = Utils.createSystemLogs(outputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = 
correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + vm.blobhashes(blobVersionedHashes); + vm.prank(validator); + + vm.expectRevert(abi.encodeWithSelector(BlobHashCommitmentError.selector, uint256(1), true, false)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } + + function test_RevertWhen_SecondBlobLinearHashNotZeroWithEmptyCommitment() public { + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 2; + bytes32[] memory blobsLinearHashes = new bytes32[](2); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes1"); + blobsLinearHashes[1] = Utils.randomBytes32("blobsLinearHashes2"); + + bytes memory operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + bytes1(0x01), + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + bytes32[] memory blobVersionedHashes = new bytes32[](2); + blobVersionedHashes[0] = defaultBlobVersionedHashes[0]; + blobVersionedHashes[1] = defaultBlobVersionedHashes[0]; + + bytes32 outputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + bytes[] memory correctL2Logs = Utils.createSystemLogs(outputHash); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = 
newCommitBatchInfo; + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + vm.blobhashes(blobVersionedHashes); + vm.prank(validator); + + // It will just panic with array out of bounds + vm.expectRevert(); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Executor/Executing.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Executing.t.sol similarity index 56% rename from l1-contracts/test/foundry/unit/concrete/Executor/Executing.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Executor/Executing.t.sol index 3360150d5..fbfc92fd4 100644 --- a/l1-contracts/test/foundry/unit/concrete/Executor/Executing.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Executing.t.sol @@ -4,18 +4,57 @@ pragma solidity 0.8.24; import {Vm} from "forge-std/Test.sol"; import {Utils, L2_SYSTEM_CONTEXT_ADDRESS} from "../Utils/Utils.sol"; -import {ExecutorTest} from "./_Executor_Shared.t.sol"; +import {ExecutorTest, EMPTY_PREPUBLISHED_COMMITMENT, POINT_EVALUATION_PRECOMPILE_RESULT} from "./_Executor_Shared.t.sol"; +import {POINT_EVALUATION_PRECOMPILE_ADDR} from "contracts/common/Config.sol"; import {L2_BOOTLOADER_ADDRESS} from "contracts/common/L2ContractAddresses.sol"; import {COMMIT_TIMESTAMP_NOT_OLDER, REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "contracts/common/Config.sol"; import {IExecutor, SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {PriorityOperationsRollingHashMismatch, 
BatchHashMismatch, NonSequentialBatch, CantExecuteUnprovenBatches, QueueIsEmpty, TxHashMismatch} from "contracts/common/L1ContractErrors.sol"; contract ExecutingTest is ExecutorTest { + bytes32 l2DAValidatorOutputHash; + bytes32[] blobVersionedHashes; + function setUp() public { + bytes1 source = bytes1(0x01); + bytes memory defaultBlobCommitment = Utils.getDefaultBlobCommitment(); + + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 1; + bytes32[] memory blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes"); + + bytes memory operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + source, + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + l2DAValidatorOutputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + blobVersionedHashes = new bytes32[](1); + blobVersionedHashes[0] = 0x01c024b4740620a5849f95930cefe298933bdf588123ea897cdf0f2462f6d2d5; + + bytes memory precompileInput = Utils.defaultPointEvaluationPrecompileInput(blobVersionedHashes[0]); + vm.mockCall(POINT_EVALUATION_PRECOMPILE_ADDR, precompileInput, POINT_EVALUATION_PRECOMPILE_RESULT); + + // This currently only uses the legacy priority queue, not the priority tree. 
+ executor.setPriorityTreeStartIndex(100); vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1); currentTimestamp = block.timestamp; - bytes[] memory correctL2Logs = Utils.createSystemLogs(); + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); correctL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, @@ -27,13 +66,19 @@ contract ExecutingTest is ExecutorTest { newCommitBatchInfo.systemLogs = l2Logs; newCommitBatchInfo.timestamp = uint64(currentTimestamp); + newCommitBatchInfo.operatorDAInput = operatorDAInput; IExecutor.CommitBatchInfo[] memory commitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); commitBatchInfoArray[0] = newCommitBatchInfo; vm.prank(validator); + vm.blobhashes(blobVersionedHashes); vm.recordLogs(); - executor.commitBatches(genesisStoredBatchInfo, commitBatchInfoArray); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + commitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); Vm.Log[] memory entries = vm.getRecordedLogs(); newStoredBatchInfo = IExecutor.StoredBatchInfo({ @@ -51,7 +96,12 @@ contract ExecutingTest is ExecutorTest { storedBatchInfoArray[0] = newStoredBatchInfo; vm.prank(validator); - executor.proveBatches(genesisStoredBatchInfo, storedBatchInfoArray, proofInput); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + storedBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); } function test_RevertWhen_ExecutingBlockWithWrongBatchNumber() public { @@ -62,8 +112,12 @@ contract ExecutingTest is ExecutorTest { storedBatchInfoArray[0] = wrongNewStoredBatchInfo; vm.prank(validator); - vm.expectRevert(bytes.concat("k")); - 
executor.executeBatches(storedBatchInfoArray); + vm.expectRevert(NonSequentialBatch.selector); + (uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = Utils.encodeExecuteBatchesData( + storedBatchInfoArray, + Utils.generatePriorityOps(storedBatchInfoArray.length) + ); + executor.executeBatchesSharedBridge(uint256(0), executeBatchFrom, executeBatchTo, executeData); } function test_RevertWhen_ExecutingBlockWithWrongData() public { @@ -74,30 +128,44 @@ contract ExecutingTest is ExecutorTest { storedBatchInfoArray[0] = wrongNewStoredBatchInfo; vm.prank(validator); - vm.expectRevert(bytes.concat("exe10")); - executor.executeBatches(storedBatchInfoArray); + vm.expectRevert( + abi.encodeWithSelector( + BatchHashMismatch.selector, + keccak256(abi.encode(newStoredBatchInfo)), + keccak256(abi.encode(wrongNewStoredBatchInfo)) + ) + ); + (uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = Utils.encodeExecuteBatchesData( + storedBatchInfoArray, + Utils.generatePriorityOps(storedBatchInfoArray.length) + ); + executor.executeBatchesSharedBridge(uint256(0), executeBatchFrom, executeBatchTo, executeData); } function test_RevertWhen_ExecutingRevertedBlockWithoutCommittingAndProvingAgain() public { vm.prank(validator); - executor.revertBatches(0); + executor.revertBatchesSharedBridge(0, 0); IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); storedBatchInfoArray[0] = newStoredBatchInfo; vm.prank(validator); - vm.expectRevert(bytes.concat("n")); - executor.executeBatches(storedBatchInfoArray); + vm.expectRevert(CantExecuteUnprovenBatches.selector); + (uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = Utils.encodeExecuteBatchesData( + storedBatchInfoArray, + Utils.generatePriorityOps(storedBatchInfoArray.length) + ); + executor.executeBatchesSharedBridge(uint256(0), executeBatchFrom, executeBatchTo, executeData); } function 
test_RevertWhen_ExecutingUnavailablePriorityOperationHash() public { vm.prank(validator); - executor.revertBatches(0); + executor.revertBatchesSharedBridge(0, 0); bytes32 arbitraryCanonicalTxHash = Utils.randomBytes32("arbitraryCanonicalTxHash"); bytes32 chainedPriorityTxHash = keccak256(bytes.concat(keccak256(""), arbitraryCanonicalTxHash)); - bytes[] memory correctL2Logs = Utils.createSystemLogs(); + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, @@ -126,8 +194,13 @@ contract ExecutingTest is ExecutorTest { correctNewCommitBatchInfoArray[0] = correctNewCommitBatchInfo; vm.prank(validator); + vm.blobhashes(blobVersionedHashes); vm.recordLogs(); - executor.commitBatches(genesisStoredBatchInfo, correctNewCommitBatchInfoArray); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); Vm.Log[] memory entries = vm.getRecordedLogs(); IExecutor.StoredBatchInfo memory correctNewStoredBatchInfo = newStoredBatchInfo; @@ -140,21 +213,37 @@ contract ExecutingTest is ExecutorTest { correctNewStoredBatchInfoArray[0] = correctNewStoredBatchInfo; vm.prank(validator); - executor.proveBatches(genesisStoredBatchInfo, correctNewStoredBatchInfoArray, proofInput); + uint256 processBatchFrom; + uint256 processBatchTo; + bytes memory processData; + { + (processBatchFrom, processBatchTo, processData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + correctNewStoredBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), processBatchFrom, processBatchTo, processData); + } vm.prank(validator); - vm.expectRevert(bytes.concat("s")); - 
executor.executeBatches(correctNewStoredBatchInfoArray); + vm.expectRevert(QueueIsEmpty.selector); + { + (processBatchFrom, processBatchTo, processData) = Utils.encodeExecuteBatchesData( + correctNewStoredBatchInfoArray, + Utils.generatePriorityOps(correctNewStoredBatchInfoArray.length) + ); + executor.executeBatchesSharedBridge(uint256(0), processBatchFrom, processBatchTo, processData); + } } function test_RevertWhen_ExecutingWithUnmatchedPriorityOperationHash() public { vm.prank(validator); - executor.revertBatches(0); + executor.revertBatchesSharedBridge(0, 0); bytes32 arbitraryCanonicalTxHash = Utils.randomBytes32("arbitraryCanonicalTxHash"); bytes32 chainedPriorityTxHash = keccak256(bytes.concat(keccak256(""), arbitraryCanonicalTxHash)); - bytes[] memory correctL2Logs = Utils.createSystemLogs(); + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, @@ -182,8 +271,13 @@ contract ExecutingTest is ExecutorTest { correctNewCommitBatchInfoArray[0] = correctNewCommitBatchInfo; vm.prank(validator); + vm.blobhashes(blobVersionedHashes); vm.recordLogs(); - executor.commitBatches(genesisStoredBatchInfo, correctNewCommitBatchInfoArray); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); Vm.Log[] memory entries = vm.getRecordedLogs(); IExecutor.StoredBatchInfo memory correctNewStoredBatchInfo = newStoredBatchInfo; @@ -196,7 +290,17 @@ contract ExecutingTest is ExecutorTest { correctNewStoredBatchInfoArray[0] = correctNewStoredBatchInfo; vm.prank(validator); - executor.proveBatches(genesisStoredBatchInfo, correctNewStoredBatchInfoArray, proofInput); + uint256 processBatchFrom; + uint256 processBatchTo; 
+ bytes memory processData; + { + (processBatchFrom, processBatchTo, processData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + correctNewStoredBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), processBatchFrom, processBatchTo, processData); + } bytes32 randomFactoryDeps0 = Utils.randomBytes32("randomFactoryDeps0"); @@ -220,8 +324,15 @@ contract ExecutingTest is ExecutorTest { }); vm.prank(validator); - vm.expectRevert(bytes.concat("x")); - executor.executeBatches(correctNewStoredBatchInfoArray); + vm.expectRevert(PriorityOperationsRollingHashMismatch.selector); + + { + (processBatchFrom, processBatchTo, processData) = Utils.encodeExecuteBatchesData( + correctNewStoredBatchInfoArray, + Utils.generatePriorityOps(correctNewStoredBatchInfoArray.length) + ); + executor.executeBatchesSharedBridge(uint256(0), processBatchFrom, processBatchTo, processData); + } } function test_RevertWhen_CommittingBlockWithWrongPreviousBatchHash() public { @@ -245,9 +356,17 @@ contract ExecutingTest is ExecutorTest { IExecutor.StoredBatchInfo memory genesisBlock = genesisStoredBatchInfo; genesisBlock.batchHash = wrongPreviousBatchHash; + bytes32 storedBatchHash = getters.storedBlockHash(1); + vm.prank(validator); - vm.expectRevert(bytes.concat("i")); - executor.commitBatches(genesisBlock, correctNewCommitBatchInfoArray); + vm.expectRevert( + abi.encodeWithSelector(BatchHashMismatch.selector, storedBatchHash, keccak256(abi.encode(genesisBlock))) + ); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisBlock, + correctNewCommitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); } function test_ShouldExecuteBatchesuccessfully() public { @@ -255,7 +374,11 @@ contract ExecutingTest is ExecutorTest { storedBatchInfoArray[0] = newStoredBatchInfo; vm.prank(validator); - executor.executeBatches(storedBatchInfoArray); + 
(uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = Utils.encodeExecuteBatchesData( + storedBatchInfoArray, + Utils.generatePriorityOps(storedBatchInfoArray.length) + ); + executor.executeBatchesSharedBridge(uint256(0), executeBatchFrom, executeBatchTo, executeData); uint256 totalBlocksExecuted = getters.getTotalBlocksExecuted(); assertEq(totalBlocksExecuted, 1); diff --git a/l1-contracts/test/foundry/unit/concrete/Executor/ExecutorProof.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Executor/ExecutorProof.t.sol similarity index 80% rename from l1-contracts/test/foundry/unit/concrete/Executor/ExecutorProof.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Executor/ExecutorProof.t.sol index 6c6d8a935..9f4530cc4 100644 --- a/l1-contracts/test/foundry/unit/concrete/Executor/ExecutorProof.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Executor/ExecutorProof.t.sol @@ -3,8 +3,8 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils} from "foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; @@ -31,7 +31,7 @@ contract TestExecutorFacet is ExecutorFacet { function processL2Logs( CommitBatchInfo calldata _newBatch, bytes32 _expectedSystemContractUpgradeTxHash - ) external pure returns (LogProcessingOutput memory logOutput) { + ) external view returns (LogProcessingOutput memory logOutput) { return _processL2Logs(_newBatch, _expectedSystemContractUpgradeTxHash); } @@ -71,8 +71,8 @@ contract ExecutorProofTest is Test { executor = TestExecutorFacet(diamondProxy); utilsFacet = UtilsFacet(diamondProxy); } - - /// This test is 
based on a block generated in a local system. + // todo + // This test is based on a block generated in a local system. function test_Hashes() public { utilsFacet.util_setL2DefaultAccountBytecodeHash( 0x0100065d134a862a777e50059f5e0fbe68b583f3617a67820f7edda0d7f253a0 @@ -80,6 +80,8 @@ contract ExecutorProofTest is Test { utilsFacet.util_setL2BootloaderBytecodeHash(0x010009416e909e0819593a9806bbc841d25c5cdfed3f4a1523497c6814e5194a); utilsFacet.util_setZkPorterAvailability(false); + bytes[] memory mockSystemLogs = Utils.createSystemLogsWithEmptyDAValidator(); + IExecutor.CommitBatchInfo memory nextBatch = IExecutor.CommitBatchInfo({ // ignored batchNumber: 1, @@ -93,10 +95,8 @@ contract ExecutorProofTest is Test { priorityOperationsHash: 0x167f4ca80269c9520ad951eeeda28dd3deb0715e9e2917461e81a60120a14183, bootloaderHeapInitialContentsHash: 0x540442e48142fa061a81822184f7790e7b69dea92153d38ef623802c6f0411c0, eventsQueueStateHash: 0xda42ab7994d4695a25f4ea8a9a485a592b7a31c20d5dae6363828de86d8826ea, - systemLogs: abi.encodePacked( - 
hex"00000000000000000000000000000000000000000000800b000000000000000000000000000000000000000000000000000000000000000416914ac26bb9cafa0f1dfaeaab10745a9094e1b60c7076fedf21651d6a25b5740000000a000000000000000000000000000000000000800b0000000000000000000000000000000000000000000000000000000000000003000000000000000000000000651bcde0000000000000000000000000651bcde20001000a00000000000000000000000000000000000080010000000000000000000000000000000000000000000000000000000000000005167f4ca80269c9520ad951eeeda28dd3deb0715e9e2917461e81a60120a141830001000a00000000000000000000000000000000000080010000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a0001000a00000000000000000000000000000000000080080000000000000000000000000000000000000000000000000000000000000000ee6ee8f50659bd8be3d86c32efb02baa5571cf3b46dd7ea3db733ae181747b8b0001000a0000000000000000000000000000000000008008000000000000000000000000000000000000000000000000000000000000000160fc5fb513ca8e6f6232a7410797954dcb6edbf9081768da24b483aca91c54db0001000a000000000000000000000000000000000000800800000000000000000000000000000000000000000000000000000000000000029a67073c2df8f53087fcfc32d82c98bba591da35df6ce1fb55a23b677d37f9fc000000000000000000000000000000000000000000008011000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080110000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000801100000000000000000000000000000000000000000000000000000000000000090000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008011000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008011000000000000
000000000000000000000000000000000000000000000000000b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008011000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000" - ), - pubdataCommitments: abi.encodePacked( + systemLogs: Utils.encodePacked(mockSystemLogs), + operatorDAInput: abi.encodePacked( hex"000000000a000100000000000000000000000000000000000000008001760f6100ddbd86c4d5a58532923e7424d33ffb44145a26171d9b2595a349450b0000000000000000000000000000000000000000000000000000000000000001000100010000000000000000000000000000000000008001a789fe4e2a955eee45d44f408f86203c8f643910bf4888d1fd1465cdbc6376d800000000000000000000000000000000000000000000000000000000000000010001000200000000000000000000000000000000000080016ba43e7c7df11e5a655f22c9bce1b37434afd2bf8fcdb10100a460e6a2c0cc83000000000000000000000000000000000000000000000000000000000000000100010003000000000000000000000000000000000000800156e569838658c17c756aa9f6e40de8f1c41b1a67fea5214ec47869882ecda9bd0000000000000000000000000000000000000000000000000000000000000001000100040000000000000000000000000000000000008001ab5d064ba75c02635fd6e4de7fd8420eda54c4bda05bd61edabe201f2066d38f00000000000000000000000000000000000000000000000000000000000000010001000500000000000000000000000000000000000080015bcb6d7c735023e0884297db5016a6c704e3490ed0671417639313ecea86795b00000000000000000000000000000000000000000000000000000000000000010001000600000000000000000000000000000000000080015ee51b5b7d47fae5811a9f777174bb08d81d78098c8bd9430a7618756a0ceb8b00000000000000000000000000000000000000000000000000000000000000010001000700000000000000000000000000000000000080011ea63171021b9ab0846efbe0a06f7882d76e24a4900c74c14fa1e0bdf313ed560000000000000000000000000000000000000000000000000000000000000001000100080000000000000000000000000000000000008001574537f1665cd9c894d8d9834d32ed291f49ae1165a0e12a79a4937f2425bf700000000000000000000000000000000000000000
00000000000000000000000100010009000000000000000000000000000000000000800190558033c8a3f7c20c81e613e00a9d0e678a7a14923e94e7cb99c8621c7918090000000000000000000000000000000000000000000000000000000000000001000000000000000001000c3104003d1291725c657fe486d0e626f562842175a705a9704c0980b40e3d716b95bbf9e8000100005dd96deb789fbc05264165795bf652190645bfae1ce253ce1db17087a898fb1e240ebf0d53563011198fddab33312923ba20f3c56cf1ba18ca5be9c053000100022bd65a924da61271d1dd5080fc640601185125830805e0ceb42f4185e5118fb454a12a3d9e0c1fbb89230f67044cc191e4f18459261233f659c9e2ba5e000100008b9feb52993729436da78b2863dd56d8d757e19c01a2cdcf1940e45ca9979941fa93f5f699afeab75e8b25cfea22004a8d2ea49f057741c2f2b910996d00010001bdf9205fb9bd185829f2c6bec2a6f100b86eff579da4fc2a8f1a15ea4afee3cea48e96b9bddb544b4569e60736a1f1fe919e223fcc08f74acf3513be1200010001bdf9205fb9bd185829f2c6bec2a6f100b86eff579da4fc2a8f1a15ea4a8755061217b6a78f5d5f8af6e326e482ebdc57f7144108662d122252ddcc27e7000100045dddc527887dc39b9cd189d6f183f16217393a5d3d3165fead2daeaf4f2d6916280c572561a809555de4a87d7a56d5bcca2c246a389dbb2a24c5639bdb0001000153c0f36532563ba2a10f52b865e558cd1a5eef9a9edd01c1cb23b74aa772beb4f3e3b784609f4e205a09863c0587e63b4b47664022cb34896a1711416b00010003e7842b0b4f4fd8e665883fe9c158ba8d38347840f1da0a75aca1fc284ce2428454b48df9f5551500fc50b63af4741b1cd21d4cfddc69aa46cb78eff45b00010000f183703a165afed04326ad5786316f6fc65b27f1cf17459a52bd1f57f27f896b7429e070ca76e3e33165ec75f6c9f439ee37f3b58822494b1251c8247500010001bdf9205fb9bd185829f2c6bec2a6f100b86eff579da4fc2a8f1a15ea4a05ea3d0bb218598c42b2e25ae5f6cbc9369b273ee6610450cade89775646b2a08902000000000000000000000000000000008b71d4a184058d07fccac4348ae02a1f663403231b0a40fa2c8c0ff73bdca092890200000000000000000000000000000000ab63c4cebbd508a7d7184f0b9134453eea7a09ca749610d5576f8046241b9cde890200000000000000000000000000000000e58af14be53d8ac56f58ff3e5b07c239bfb549149f067597e9d028f35e3c2b77890200000000000000000000000000000000b78e94980fec3a5f68aa25d0d934084907688e537e82c2942af905aab21413ab890200
000000000000000000000000000000c4db460819691e825328b532024bbecdc40394c74307a00bd245fc658b1bd34f0901908827f2052a14b24a10cae1f9e259ead06a89a1d74ff736a54f54ebcf05eeb30901d32d07305b87debd25698d4dfac4c2f986693a4e9d9baff7da37a7b5ca8d01cb0901e73042e5dacff2ce20a720c9c6d694576e4afa7bbbafdc4d409c63b7ca8027b70901760a7405795441aceea3be649a53d02785cb3487b7bd23e3b4888a935cee010d09011f3acf5d6d7bfeab8a7112771866e28c3714e0c315a81ec6a58ab4ad1c3d6eb10901c207b49d14deb3af9bc960d57074e27386285c73248abc5fa1d72aa6e8664fa40901644f0c4e15446d7e5ff363c944b55bd6801a1f38afd984c3427569530cb663210901743be0243628b8e7e8f04c00fc4f88efae001250a7482b31e6a0ec87ee3598e7090171e91721f9918576d760f02f03cac47c6f4003316031848e3c1d99e6e83a47434102d84e69f2f480002d5a6962cccee5d4adb48a36bbbf443a531721484381125937f3001ac5ff875b41022f496efbbdb2007b727eb806c926fb20c8ad087c57422977cebd06373e26d19b640e5fe32c85ff39a904faf736ce00a25420c1d9d705358c134cc601d9d184cb4dfdde7e1cac2bc3d4d38bf9ec44e6210ee6b280123bafc586f77764488cd24c6a77546e5a0fe8bdfb4fa203cfaffc36cce4dd5b8901000000000000000000000000651bcde08e7dd06ac5b73b473be6bc5a51030f4c7437657cb7b29bf376c564b8d1675a5e8903000000000000000000000000651bcde24ba84e1f37d041bc6e55ba396826cc494e84d4815b6db52690422eea7386314f00e8e77626586f73b955364c7b4bbf0bb7f7685ebd40e852b164633a4acbd3244c3de2202ccb626ad387d70722e64fbe44562e2f231a290c08532b8d6aba402ff50025fe002039e87b424de2772b82d935f14e2b657429a1bcc04612391ea0330c90ebddefdda48eb2aa7f66ecf7940a280e9ef3fb2e95db0995538440a62af79861004434720529e816fd2e40f8031a8d7471ebcd00351db0787346bcfe8dfad8d2b479093588d0e847efa73a10ce20e4799fb1e46642d65617c7e5213fa04989d92d8903000000000000000000000000651bcde287ded247e1660f827071c7f1371934589751085384fc9f4462c1f1897c5c3eef890100000000000000000000000000000001911dd2ad743ff237d411648a0fe32c6d74eec060716a2a74352f6b1c435b5d670016914ac26bb9cafa0f1dfaeaab10745a9094e1b60c7076fedf21651d6a25b574686a068c708f1bdbefd9e6e454ac2b520fd41c8dcf23ecd4cee978c22f1c1f5f09ff974fe8b575175cefa919a5ba1c0ddf4409be4b16695dc7bd12f6
701b99bd2e70a152312ad6f01657413b2eae9287f6b9adad93d5fed1a0dd5e13ec74ce1163146509bfe426f2315a69cb452bf388cccd321eca2746a1adf793b489e5c8f61c40688b7ef3e53defc56c78facf513e511f9f5ba0eb50dbcc745afea3b860da75b394d2d1627b6e2ef54fb7b187d0af61e4532c238f387ecf9f0b466f1d54414100018e519b65c8901b344a480638beadb923fbd3462e475d39acebe559d65ed5cb11a1b25279f1918477c35eec1332ff07001d3f85cf854b70d7552f93ba8e88d581064ca4c0df6ac456c00a0e83898ccd464c63e5008aa1a498cc0646b78eb216d9eeeec76ed0eb0ee6c352f35ca5f0b2edc2ca17d211cc5cb905ba10142f042a6ac836d9cef9a6916635c9a1c1d2dc62a9fe83e2230b506b98e0fded46249008fe28b813907a05ae0d773d8f31e330200e9336e0159034c137ed645fb67ccca8a152312ad6f01657413b2eae9287f6b9adad93d5fee5d8f810abde496ccbeb45a4f3c06af828975163a006257cbf18cefebbfb4cd409025f40404a3d37bba024799ce32d7c2a833aec8474288a26b246afa32b07b4a3ce00577261707065642045746865720000000000000000000000000000000000001a09cf14f266dfe87c4b33e6d934de01f8f7242199fa8783178117218fa033f7ab005745544800000000000000000000000000000000000000000000000000000008289026c5fa173652bd62774824698a6848c63031f853d0e275174552f35df33000577261707065642045746865720000000000000000000000000000000000001a1e59309944cbc900ae848855e10bc929f78e86c2179d6e96cf52bfd520f039200031000000000000000000000000000000000000000000000000000000000000021653a735395136e5494c5426ba972b45e34d36ebcb86ac104c724ab375fcce90a18580ba6aeebc6e6b89d226c79be8927257a436ad11d9c0305b18e9d78cab8f75a3aec2096302b67e3815939e29476fb36a0d8299a1b25279f1918477c35eec1332ff07001d3f85cf85688525f98e4859a9c6939f2d2f92e6b1950ed57e56137d717aca1ccf9754f719a1c7ebe9226d26524400a8959a08f411a727ae7bb68f8febecd89ffe9d84708d24544d452de3e22e62b3b2b872e430839a15115818a152312ad6f01657413b2eae9287f6b9adad93d5fe3fb60af355125687beeb90c066ace76c442b0f963a6afd0e3316fcdd673ad22c09ff30c8a03ec44e5337a1f9d66763cf1b319fdc6d8bc4981e1f47edbd86210614b909ff0cbdceb634b81192417b64d114d535ad3bdba97d6d7e90ee2a79bf1c132d3c2d09ff5cd85060f4ff26eb5b68a6687aee76c1b7a77575fdc86ba49b4faf5041377a79b14de8989f2385a6e23f6bd05a
80e0d9231870c15a000142e50adc0d84bff439d0086d9fbab9984f8b27aa208935238a60cc62e7c9bb2ea1709e94c96366b3c40ea4854837c18733e5ac1193b8d8e4070d2eca4441b0378b572bd949ab764fd71c002b759613c3e29d425cf4000100012730c940a81021004e899c6ee4bec02f0667757b9d75a8f0714ce6c157f5940b7664e4f69f01fc530db36965e33599a1348629f07ae2d724007ac36a71a16baac84db583d88e0f3a8c082e3632fcc0e15757f0dcf5234b87af41fdee4c0999c4fe698a8d824415979ab839e6913a975a3055a152312ad6f01657413b2eae9287f6b9adad93d5fe00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" ) }); @@ -104,26 +104,24 @@ contract ExecutorProofTest is Test { nextBatch, 0x0000000000000000000000000000000000000000000000000000000000000000 ); - assertEq( - logOutput.stateDiffHash, - 0x9a67073c2df8f53087fcfc32d82c98bba591da35df6ce1fb55a23b677d37f9fc, - "stateDiffHash computation failed" - ); bytes32 nextCommitment = executor.createBatchCommitment( nextBatch, logOutput.stateDiffHash, - new bytes32[](6), - new bytes32[](6) + new bytes32[](16), + new bytes32[](16) ); assertEq( nextCommitment, - 0xa1dcde434352cda8e331e721232ff2d457d4074efae1e3d06ef5b10ffada0c9a, + 0x81e46ea22cdb4a0a6cb30b6c02170394703e9bdd101275d542a7c6c23c789898, "nextCommitment computation failed" ); bytes32 prevCommitment = 0x6ebf945305689a8c3ac993df7f002d41d311a762cd6bf39bb054ead8d1f54404; uint256 result = executor.getBatchProofPublicInput(prevCommitment, nextCommitment); - assertEq(result, 0xAC7931F2C11013FC24963E41B86E5325A79F1150350CB41E4F0876A7, "getBatchProofPublicInput"); + assertEq(result, 0x7C854720CBA105B9E34DA6A28770B93AD384C1BF98C497CCBFA4DADB, "getBatchProofPublicInput"); } + + // add this to be excluded from coverage report + function test() internal {} } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Executor/Proving.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Proving.t.sol new file mode 100644 index 000000000..73b104186 --- /dev/null +++ 
b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Proving.t.sol @@ -0,0 +1,178 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Vm} from "forge-std/Test.sol"; +import {Utils, L2_SYSTEM_CONTEXT_ADDRESS} from "../Utils/Utils.sol"; + +import {ExecutorTest, POINT_EVALUATION_PRECOMPILE_RESULT, EMPTY_PREPUBLISHED_COMMITMENT} from "./_Executor_Shared.t.sol"; + +import {COMMIT_TIMESTAMP_NOT_OLDER, POINT_EVALUATION_PRECOMPILE_ADDR} from "contracts/common/Config.sol"; +import {IExecutor, SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {VerifiedBatchesExceedsCommittedBatches, BatchHashMismatch} from "contracts/common/L1ContractErrors.sol"; + +contract ProvingTest is ExecutorTest { + bytes32 l2DAValidatorOutputHash; + bytes32[] blobVersionedHashes; + bytes operatorDAInput; + + function setUp() public { + setUpCommitBatch(); + + vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1); + currentTimestamp = block.timestamp; + + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + bytes memory l2Logs = Utils.encodePacked(correctL2Logs); + + newCommitBatchInfo.timestamp = uint64(currentTimestamp); + newCommitBatchInfo.systemLogs = l2Logs; + newCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory commitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + commitBatchInfoArray[0] = newCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(blobVersionedHashes); + vm.recordLogs(); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + commitBatchInfoArray + ); + 
executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + Vm.Log[] memory entries = vm.getRecordedLogs(); + + newStoredBatchInfo = IExecutor.StoredBatchInfo({ + batchNumber: 1, + batchHash: entries[0].topics[2], + indexRepeatedStorageChanges: 0, + numberOfLayer1Txs: 0, + priorityOperationsHash: keccak256(""), + l2LogsTreeRoot: 0, + timestamp: currentTimestamp, + commitment: entries[0].topics[3] + }); + } + + function setUpCommitBatch() public { + bytes1 source = bytes1(0x01); + bytes memory defaultBlobCommitment = Utils.getDefaultBlobCommitment(); + + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 1; + bytes32[] memory blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes"); + + operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + source, + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + l2DAValidatorOutputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + blobVersionedHashes = new bytes32[](1); + blobVersionedHashes[0] = 0x01c024b4740620a5849f95930cefe298933bdf588123ea897cdf0f2462f6d2d5; + + bytes memory precompileInput = Utils.defaultPointEvaluationPrecompileInput(blobVersionedHashes[0]); + vm.mockCall(POINT_EVALUATION_PRECOMPILE_ADDR, precompileInput, POINT_EVALUATION_PRECOMPILE_RESULT); + } + + function test_RevertWhen_ProvingWithWrongPreviousBlockData() public { + IExecutor.StoredBatchInfo memory wrongPreviousStoredBatchInfo = genesisStoredBatchInfo; + wrongPreviousStoredBatchInfo.batchNumber = 10; // Correct is 0 + + IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); + storedBatchInfoArray[0] = 
newStoredBatchInfo; + + vm.prank(validator); + + vm.expectRevert( + abi.encodeWithSelector( + BatchHashMismatch.selector, + keccak256(abi.encode(genesisStoredBatchInfo)), + keccak256(abi.encode(wrongPreviousStoredBatchInfo)) + ) + ); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + wrongPreviousStoredBatchInfo, + storedBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); + } + + function test_RevertWhen_ProvingWithWrongCommittedBlock() public { + IExecutor.StoredBatchInfo memory wrongNewStoredBatchInfo = newStoredBatchInfo; + wrongNewStoredBatchInfo.batchNumber = 10; // Correct is 1 + + IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); + storedBatchInfoArray[0] = wrongNewStoredBatchInfo; + + vm.prank(validator); + + vm.expectRevert( + abi.encodeWithSelector( + BatchHashMismatch.selector, + keccak256(abi.encode(newStoredBatchInfo)), + keccak256(abi.encode(wrongNewStoredBatchInfo)) + ) + ); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + storedBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); + } + + function test_RevertWhen_ProvingRevertedBlockWithoutCommittingAgain() public { + vm.prank(validator); + executor.revertBatchesSharedBridge(0, 0); + + IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); + storedBatchInfoArray[0] = newStoredBatchInfo; + + vm.prank(validator); + + vm.expectRevert(VerifiedBatchesExceedsCommittedBatches.selector); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + storedBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); + } 
+ + function test_SuccessfulProve() public { + IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); + storedBatchInfoArray[0] = newStoredBatchInfo; + + vm.prank(validator); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + storedBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); + + uint256 totalBlocksVerified = getters.getTotalBlocksVerified(); + assertEq(totalBlocksVerified, 1); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Executor/Reverting.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Reverting.t.sol new file mode 100644 index 000000000..ba2fc4b60 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Executor/Reverting.t.sol @@ -0,0 +1,129 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Vm} from "forge-std/Test.sol"; +import {Utils, L2_SYSTEM_CONTEXT_ADDRESS} from "../Utils/Utils.sol"; + +import {ExecutorTest, POINT_EVALUATION_PRECOMPILE_RESULT, EMPTY_PREPUBLISHED_COMMITMENT} from "./_Executor_Shared.t.sol"; + +import {COMMIT_TIMESTAMP_NOT_OLDER, POINT_EVALUATION_PRECOMPILE_ADDR} from "contracts/common/Config.sol"; +import {IExecutor, SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {RevertedBatchNotAfterNewLastBatch} from "contracts/common/L1ContractErrors.sol"; + +contract RevertingTest is ExecutorTest { + bytes32 l2DAValidatorOutputHash; + bytes32[] blobVersionedHashes; + bytes operatorDAInput; + + function setUp() public { + setUpCommitBatch(); + + vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1); + currentTimestamp = block.timestamp; + + bytes[] memory correctL2Logs = Utils.createSystemLogs(l2DAValidatorOutputHash); + correctL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + 
uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + bytes memory l2Logs = Utils.encodePacked(correctL2Logs); + newCommitBatchInfo.timestamp = uint64(currentTimestamp); + newCommitBatchInfo.systemLogs = l2Logs; + newCommitBatchInfo.operatorDAInput = operatorDAInput; + + IExecutor.CommitBatchInfo[] memory commitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + commitBatchInfoArray[0] = newCommitBatchInfo; + + vm.prank(validator); + vm.blobhashes(blobVersionedHashes); + vm.recordLogs(); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + commitBatchInfoArray + ); + executor.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + Vm.Log[] memory entries = vm.getRecordedLogs(); + + newStoredBatchInfo = IExecutor.StoredBatchInfo({ + batchNumber: 1, + batchHash: entries[0].topics[2], + indexRepeatedStorageChanges: 0, + numberOfLayer1Txs: 0, + priorityOperationsHash: keccak256(""), + l2LogsTreeRoot: 0, + timestamp: currentTimestamp, + commitment: entries[0].topics[3] + }); + + IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); + storedBatchInfoArray[0] = newStoredBatchInfo; + + vm.prank(validator); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + storedBatchInfoArray, + proofInput + ); + executor.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); + } + + function setUpCommitBatch() public { + bytes1 source = bytes1(0x01); + bytes memory defaultBlobCommitment = Utils.getDefaultBlobCommitment(); + + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 1; + bytes32[] memory 
blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = Utils.randomBytes32("blobsLinearHashes"); + + operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + source, + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + l2DAValidatorOutputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + blobVersionedHashes = new bytes32[](1); + blobVersionedHashes[0] = 0x01c024b4740620a5849f95930cefe298933bdf588123ea897cdf0f2462f6d2d5; + + bytes memory precompileInput = Utils.defaultPointEvaluationPrecompileInput(blobVersionedHashes[0]); + vm.mockCall(POINT_EVALUATION_PRECOMPILE_ADDR, precompileInput, POINT_EVALUATION_PRECOMPILE_RESULT); + } + + function test_RevertWhen_RevertingMoreBatchesThanAlreadyCommitted() public { + vm.prank(validator); + vm.expectRevert(RevertedBatchNotAfterNewLastBatch.selector); + executor.revertBatchesSharedBridge(0, 10); + } + + function test_SuccessfulRevert() public { + uint256 totalBlocksCommittedBefore = getters.getTotalBlocksCommitted(); + assertEq(totalBlocksCommittedBefore, 1, "totalBlocksCommittedBefore"); + + uint256 totalBlocksVerifiedBefore = getters.getTotalBlocksVerified(); + assertEq(totalBlocksVerifiedBefore, 1, "totalBlocksVerifiedBefore"); + + vm.prank(validator); + executor.revertBatchesSharedBridge(0, 0); + + uint256 totalBlocksCommitted = getters.getTotalBlocksCommitted(); + assertEq(totalBlocksCommitted, 0, "totalBlocksCommitted"); + + uint256 totalBlocksVerified = getters.getTotalBlocksVerified(); + assertEq(totalBlocksVerified, 0, "totalBlocksVerified"); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Executor/_Executor_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Executor/_Executor_Shared.t.sol similarity index 56% rename from l1-contracts/test/foundry/unit/concrete/Executor/_Executor_Shared.t.sol rename to 
l1-contracts/test/foundry/l1/unit/concrete/Executor/_Executor_Shared.t.sol index b96602f63..eaa6ccf10 100644 --- a/l1-contracts/test/foundry/unit/concrete/Executor/_Executor_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Executor/_Executor_Shared.t.sol @@ -3,29 +3,45 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils, DEFAULT_L2_LOGS_TREE_ROOT_HASH} from "../Utils/Utils.sol"; +import {Utils, DEFAULT_L2_LOGS_TREE_ROOT_HASH, L2_DA_VALIDATOR_ADDRESS} from "../Utils/Utils.sol"; import {COMMIT_TIMESTAMP_NOT_OLDER, ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; import {DummyEraBaseTokenBridge} from "contracts/dev-contracts/test/DummyEraBaseTokenBridge.sol"; -import {DummyStateTransitionManager} from "contracts/dev-contracts/test/DummyStateTransitionManager.sol"; -import {IStateTransitionManager} from "contracts/state-transition/IStateTransitionManager.sol"; +import {DummyChainTypeManager} from "contracts/dev-contracts/test/DummyChainTypeManager.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; -import {VerifierParams, FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; +import {VerifierParams, FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; import {TestExecutor} from "contracts/dev-contracts/test/TestExecutor.sol"; +import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; import {InitializeData} from 
"contracts/state-transition/chain-interfaces/IDiamondInit.sol"; -import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {IExecutor, TOTAL_BLOBS_IN_COMMITMENT} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; import {IVerifier} from "contracts/state-transition/chain-interfaces/IVerifier.sol"; +import {IL1DAValidator} from "contracts/state-transition/chain-interfaces/IL1DAValidator.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; +import {Verifier} from "contracts/state-transition/Verifier.sol"; +import {VerifierFflonk} from "contracts/state-transition/VerifierFflonk.sol"; +import {DummyBridgehub} from "contracts/dev-contracts/test/DummyBridgehub.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +import {RollupL1DAValidator} from "da-contracts/RollupL1DAValidator.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IAssetRouterBase} from "contracts/bridge/asset-router/IAssetRouterBase.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; + +bytes32 constant EMPTY_PREPUBLISHED_COMMITMENT = 0x0000000000000000000000000000000000000000000000000000000000000000; +bytes constant POINT_EVALUATION_PRECOMPILE_RESULT = hex"000000000000000000000000000000000000000000000000000000000000100073eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001"; contract ExecutorTest is Test { address internal owner; address internal validator; address internal randomSigner; address internal blobVersionedHashRetriever; + address internal l1DAValidator; AdminFacet internal admin; TestExecutor internal executor; GettersFacet internal getters; @@ -35,14 +51,17 @@ contract ExecutorTest is Test { uint256 internal currentTimestamp; IExecutor.CommitBatchInfo internal 
newCommitBatchInfo; IExecutor.StoredBatchInfo internal newStoredBatchInfo; + DummyEraBaseTokenBridge internal sharedBridge; + RollupL1DAValidator internal rollupL1DAValidator; + MessageRoot internal messageRoot; uint256 eraChainId; IExecutor.StoredBatchInfo internal genesisStoredBatchInfo; - IExecutor.ProofInput internal proofInput; + uint256[] internal proofInput; function getAdminSelectors() private view returns (bytes4[] memory) { - bytes4[] memory selectors = new bytes4[](11); + bytes4[] memory selectors = new bytes4[](12); selectors[0] = admin.setPendingAdmin.selector; selectors[1] = admin.acceptAdmin.selector; selectors[2] = admin.setValidator.selector; @@ -54,48 +73,53 @@ contract ExecutorTest is Test { selectors[8] = admin.executeUpgrade.selector; selectors[9] = admin.freezeDiamond.selector; selectors[10] = admin.unfreezeDiamond.selector; + selectors[11] = admin.setDAValidatorPair.selector; return selectors; } function getExecutorSelectors() private view returns (bytes4[] memory) { - bytes4[] memory selectors = new bytes4[](4); - selectors[0] = executor.commitBatches.selector; - selectors[1] = executor.proveBatches.selector; - selectors[2] = executor.executeBatches.selector; - selectors[3] = executor.revertBatches.selector; + bytes4[] memory selectors = new bytes4[](5); + selectors[0] = executor.commitBatchesSharedBridge.selector; + selectors[1] = executor.proveBatchesSharedBridge.selector; + selectors[2] = executor.executeBatchesSharedBridge.selector; + selectors[3] = executor.revertBatchesSharedBridge.selector; + selectors[4] = executor.setPriorityTreeStartIndex.selector; return selectors; } function getGettersSelectors() public view returns (bytes4[] memory) { - bytes4[] memory selectors = new bytes4[](28); - selectors[0] = getters.getVerifier.selector; - selectors[1] = getters.getAdmin.selector; - selectors[2] = getters.getPendingAdmin.selector; - selectors[3] = getters.getTotalBlocksCommitted.selector; - selectors[4] = 
getters.getTotalBlocksVerified.selector; - selectors[5] = getters.getTotalBlocksExecuted.selector; - selectors[6] = getters.getTotalPriorityTxs.selector; - selectors[7] = getters.getFirstUnprocessedPriorityTx.selector; - selectors[8] = getters.getPriorityQueueSize.selector; - selectors[9] = getters.priorityQueueFrontOperation.selector; - selectors[10] = getters.isValidator.selector; - selectors[11] = getters.l2LogsRootHash.selector; - selectors[12] = getters.storedBatchHash.selector; - selectors[13] = getters.getL2BootloaderBytecodeHash.selector; - selectors[14] = getters.getL2DefaultAccountBytecodeHash.selector; - selectors[15] = getters.getVerifierParams.selector; - selectors[16] = getters.isDiamondStorageFrozen.selector; - selectors[17] = getters.getPriorityTxMaxGasLimit.selector; - selectors[18] = getters.isEthWithdrawalFinalized.selector; - selectors[19] = getters.facets.selector; - selectors[20] = getters.facetFunctionSelectors.selector; - selectors[21] = getters.facetAddresses.selector; - selectors[22] = getters.facetAddress.selector; - selectors[23] = getters.isFunctionFreezable.selector; - selectors[24] = getters.isFacetFreezable.selector; - selectors[25] = getters.getTotalBatchesCommitted.selector; - selectors[26] = getters.getTotalBatchesVerified.selector; - selectors[27] = getters.getTotalBatchesExecuted.selector; + bytes4[] memory selectors = new bytes4[](31); + selectors[0] = getters.getDualVerifier.selector; + selectors[1] = getters.getPlonkVerifier.selector; + selectors[2] = getters.getFflonkVerifier.selector; + selectors[3] = getters.getFflonkProofLength.selector; + selectors[4] = getters.getAdmin.selector; + selectors[5] = getters.getPendingAdmin.selector; + selectors[6] = getters.getTotalBlocksCommitted.selector; + selectors[7] = getters.getTotalBlocksVerified.selector; + selectors[8] = getters.getTotalBlocksExecuted.selector; + selectors[9] = getters.getTotalPriorityTxs.selector; + selectors[10] = getters.getFirstUnprocessedPriorityTx.selector; 
+ selectors[11] = getters.getPriorityQueueSize.selector; + selectors[12] = getters.getTotalBatchesExecuted.selector; + selectors[13] = getters.isValidator.selector; + selectors[14] = getters.l2LogsRootHash.selector; + selectors[15] = getters.storedBatchHash.selector; + selectors[16] = getters.getL2BootloaderBytecodeHash.selector; + selectors[17] = getters.getL2DefaultAccountBytecodeHash.selector; + selectors[18] = getters.getVerifierParams.selector; + selectors[19] = getters.isDiamondStorageFrozen.selector; + selectors[20] = getters.getPriorityTxMaxGasLimit.selector; + selectors[21] = getters.isEthWithdrawalFinalized.selector; + selectors[22] = getters.facets.selector; + selectors[23] = getters.facetFunctionSelectors.selector; + selectors[24] = getters.facetAddresses.selector; + selectors[25] = getters.facetAddress.selector; + selectors[26] = getters.isFunctionFreezable.selector; + selectors[27] = getters.isFacetFreezable.selector; + selectors[28] = getters.getTotalBatchesCommitted.selector; + selectors[29] = getters.getTotalBatchesVerified.selector; + selectors[30] = getters.storedBlockHash.selector; return selectors; } @@ -126,18 +150,32 @@ contract ExecutorTest is Test { validator = makeAddr("validator"); randomSigner = makeAddr("randomSigner"); blobVersionedHashRetriever = makeAddr("blobVersionedHashRetriever"); + DummyBridgehub dummyBridgehub = new DummyBridgehub(); + messageRoot = new MessageRoot(IBridgehub(address(dummyBridgehub))); + dummyBridgehub.setMessageRoot(address(messageRoot)); + sharedBridge = new DummyEraBaseTokenBridge(); + + dummyBridgehub.setSharedBridge(address(sharedBridge)); + + vm.mockCall( + address(messageRoot), + abi.encodeWithSelector(MessageRoot.addChainBatchRoot.selector, 9, 1, bytes32(0)), + abi.encode() + ); eraChainId = 9; - executor = new TestExecutor(); - admin = new AdminFacet(); + rollupL1DAValidator = new RollupL1DAValidator(); + + admin = new AdminFacet(block.chainid); getters = new GettersFacet(); - mailbox = new 
MailboxFacet(eraChainId); + executor = new TestExecutor(); + mailbox = new MailboxFacet(eraChainId, block.chainid); - DummyStateTransitionManager stateTransitionManager = new DummyStateTransitionManager(); + DummyChainTypeManager chainTypeManager = new DummyChainTypeManager(); vm.mockCall( - address(stateTransitionManager), - abi.encodeWithSelector(IStateTransitionManager.protocolVersionIsActive.selector), + address(chainTypeManager), + abi.encodeWithSelector(IChainTypeManager.protocolVersionIsActive.selector), abi.encode(bool(true)) ); DiamondInit diamondInit = new DiamondInit(); @@ -160,15 +198,17 @@ contract ExecutorTest is Test { InitializeData memory params = InitializeData({ // TODO REVIEW chainId: eraChainId, - bridgehub: makeAddr("bridgehub"), - stateTransitionManager: address(stateTransitionManager), + bridgehub: address(dummyBridgehub), + chainTypeManager: address(chainTypeManager), protocolVersion: 0, admin: owner, validatorTimelock: validator, - baseToken: ETH_TOKEN_ADDRESS, - baseTokenBridge: address(new DummyEraBaseTokenBridge()), + baseTokenAssetId: DataEncoding.encodeNTVAssetId(block.chainid, ETH_TOKEN_ADDRESS), storedBatchZero: keccak256(abi.encode(genesisStoredBatchInfo)), - verifier: IVerifier(testnetVerifier), // verifier + dualVerifier: IVerifier(testnetVerifier), // verifier + plonkVerifier: new Verifier(), + fflonkVerifier: new VerifierFflonk(), + fflonkProofLength: 0, verifierParams: VerifierParams({ recursionNodeLevelVkHash: 0, recursionLeafLevelVkHash: 0, @@ -224,19 +264,17 @@ contract ExecutorTest is Test { admin = AdminFacet(address(diamondProxy)); // Initiate the token multiplier to enable L1 -> L2 transactions. 
- vm.prank(address(stateTransitionManager)); + vm.prank(address(chainTypeManager)); admin.setTokenMultiplier(1, 1); - - uint256[] memory recursiveAggregationInput; - uint256[] memory serializedProof; - proofInput = IExecutor.ProofInput(recursiveAggregationInput, serializedProof); + vm.prank(address(owner)); + admin.setDAValidatorPair(address(rollupL1DAValidator), L2_DA_VALIDATOR_ADDRESS); // foundry's default value is 1 for the block's timestamp, it is expected // that block.timestamp > COMMIT_TIMESTAMP_NOT_OLDER + 1 vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1 + 1); currentTimestamp = block.timestamp; - bytes memory l2Logs = Utils.encodePacked(Utils.createSystemLogs()); + bytes memory l2Logs = Utils.encodePacked(Utils.createSystemLogs(bytes32(0))); newCommitBatchInfo = IExecutor.CommitBatchInfo({ batchNumber: 1, timestamp: uint64(currentTimestamp), @@ -247,8 +285,14 @@ contract ExecutorTest is Test { bootloaderHeapInitialContentsHash: Utils.randomBytes32("bootloaderHeapInitialContentsHash"), eventsQueueStateHash: Utils.randomBytes32("eventsQueueStateHash"), systemLogs: l2Logs, - pubdataCommitments: "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + operatorDAInput: "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" }); + + vm.mockCall( + address(sharedBridge), + abi.encodeWithSelector(IL1AssetRouter.bridgehubDepositBaseToken.selector), + abi.encode(true) + ); } // add this to be excluded from coverage report diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Governance/AccessControlRestriction.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/Governance/AccessControlRestriction.t.sol new file mode 100644 index 000000000..1cd471413 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Governance/AccessControlRestriction.t.sol @@ -0,0 +1,186 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; + +import "@openzeppelin/contracts-v4/utils/Strings.sol"; +import "forge-std/console.sol"; +import {IChainAdmin} from "contracts/governance/IChainAdmin.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {IAccessControlRestriction} from "contracts/governance/IAccessControlRestriction.sol"; +import {Utils} from "test/foundry/l1/unit/concrete/Utils/Utils.sol"; +import {NoCallsProvided, AccessToFallbackDenied, AccessToFunctionDenied} from "contracts/common/L1ContractErrors.sol"; +import {Call} from "contracts/governance/Common.sol"; + +contract AccessRestrictionTest is Test { + AccessControlRestriction internal restriction; + ChainAdmin internal chainAdmin; + address owner; + address randomCaller; + bytes32 public constant DEFAULT_ADMIN_ROLE = 0x00; + + function getChainAdminSelectors() public pure returns (bytes4[] memory) { + bytes4[] memory selectors = new bytes4[](12); + selectors[0] = IChainAdmin.getRestrictions.selector; + selectors[1] = IChainAdmin.isRestrictionActive.selector; + selectors[2] = IChainAdmin.addRestriction.selector; + selectors[3] = IChainAdmin.removeRestriction.selector; + + return selectors; + } + + function setUp() public { + owner = makeAddr("random address"); + randomCaller = makeAddr("random caller"); + + restriction = new AccessControlRestriction(0, owner); + address[] memory restrictions = new address[](1); + restrictions[0] = address(restriction); + + chainAdmin = new ChainAdmin(restrictions); + } + + function test_adminAsAddressZero() public { + 
vm.expectRevert("AccessControl: 0 default admin"); + new AccessControlRestriction(0, address(0)); + } + + function test_setRequiredRoleForCallByNotDefaultAdmin(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + bytes4[] memory chainAdminSelectors = getChainAdminSelectors(); + string memory revertMsg = string( + abi.encodePacked( + "AccessControl: account ", + Strings.toHexString(uint160(randomCaller), 20), + " is missing role ", + Strings.toHexString(uint256(DEFAULT_ADMIN_ROLE), 32) + ) + ); + + vm.expectRevert(bytes(revertMsg)); + vm.prank(randomCaller); + restriction.setRequiredRoleForCall(address(chainAdmin), chainAdminSelectors[0], role); + } + + function test_setRequiredRoleForCallAccessToFunctionDenied(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + bytes4[] memory chainAdminSelectors = getChainAdminSelectors(); + + vm.startPrank(owner); + restriction.setRequiredRoleForCall(address(chainAdmin), chainAdminSelectors[0], role); + vm.stopPrank(); + + Call memory call = Call({ + target: address(chainAdmin), + value: 0, + data: abi.encodeCall(IChainAdmin.getRestrictions, ()) + }); + + vm.expectRevert( + abi.encodeWithSelector( + AccessToFunctionDenied.selector, + address(chainAdmin), + chainAdminSelectors[0], + randomCaller + ) + ); + restriction.validateCall(call, randomCaller); + } + + function test_setRequiredRoleForCall(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + bytes4[] memory chainAdminSelectors = getChainAdminSelectors(); + + vm.expectEmit(true, true, false, true); + emit IAccessControlRestriction.RoleSet(address(chainAdmin), chainAdminSelectors[0], role); + + vm.startPrank(owner); + restriction.setRequiredRoleForCall(address(chainAdmin), chainAdminSelectors[0], role); + restriction.grantRole(role, randomCaller); + vm.stopPrank(); + + Call memory call = Call({ + target: address(chainAdmin), + value: 0, + data: abi.encodeCall(IChainAdmin.getRestrictions, ()) + }); + restriction.validateCall(call, 
randomCaller); + } + + function test_setRequiredRoleForFallbackByNotDefaultAdmin(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + string memory revertMsg = string( + abi.encodePacked( + "AccessControl: account ", + Strings.toHexString(uint160(randomCaller), 20), + " is missing role ", + Strings.toHexString(uint256(DEFAULT_ADMIN_ROLE), 32) + ) + ); + + vm.expectRevert(bytes(revertMsg)); + vm.prank(randomCaller); + restriction.setRequiredRoleForFallback(address(chainAdmin), role); + } + + function test_setRequiredRoleForFallbackAccessToFallbackDenied(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + vm.startPrank(owner); + restriction.setRequiredRoleForFallback(address(chainAdmin), role); + vm.stopPrank(); + + Call memory call = Call({target: address(chainAdmin), value: 0, data: ""}); + + vm.expectRevert(abi.encodeWithSelector(AccessToFallbackDenied.selector, address(chainAdmin), randomCaller)); + restriction.validateCall(call, randomCaller); + } + + function test_setRequiredRoleForFallback(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + vm.expectEmit(true, false, false, true); + emit IAccessControlRestriction.FallbackRoleSet(address(chainAdmin), role); + + vm.startPrank(owner); + restriction.setRequiredRoleForFallback(address(chainAdmin), role); + restriction.grantRole(role, randomCaller); + vm.stopPrank(); + + Call memory call = Call({target: address(chainAdmin), value: 0, data: ""}); + restriction.validateCall(call, randomCaller); + } + + function test_validateCallFunction(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + bytes4[] memory chainAdminSelectors = getChainAdminSelectors(); + vm.startPrank(owner); + restriction.setRequiredRoleForCall(address(chainAdmin), chainAdminSelectors[0], role); + restriction.grantRole(role, randomCaller); + vm.stopPrank(); + + Call memory call = Call({ + target: address(chainAdmin), + value: 0, + data: abi.encodeCall(IChainAdmin.getRestrictions, ()) + }); 
+ restriction.validateCall(call, randomCaller); + } + + function test_validateCallFallback(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + vm.startPrank(owner); + restriction.setRequiredRoleForFallback(address(chainAdmin), role); + restriction.grantRole(role, randomCaller); + vm.stopPrank(); + + Call memory call = Call({target: address(chainAdmin), value: 0, data: ""}); + restriction.validateCall(call, randomCaller); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Governance/Authorization.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/Authorization.t.sol similarity index 79% rename from l1-contracts/test/foundry/unit/concrete/Governance/Authorization.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Governance/Authorization.t.sol index 540870032..5cc75bf06 100644 --- a/l1-contracts/test/foundry/unit/concrete/Governance/Authorization.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Governance/Authorization.t.sol @@ -3,6 +3,7 @@ pragma solidity 0.8.24; import {GovernanceTest} from "./_Governance_Shared.t.sol"; import {IGovernance} from "contracts/governance/IGovernance.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; contract Authorization is GovernanceTest { function test_RevertWhen_SchedulingByUnauthorisedAddress() public { @@ -33,21 +34,21 @@ contract Authorization is GovernanceTest { function test_RevertWhen_ExecutingByUnauthorisedAddress() public { vm.prank(randomSigner); - vm.expectRevert("Only the owner and security council are allowed to call this function"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomSigner)); IGovernance.Operation memory op = operationWithOneCallZeroSaltAndPredecessor(address(eventOnFallback), 0, ""); governance.execute(op); } function test_RevertWhen_ExecutingInstantByUnauthorisedAddress() public { vm.prank(randomSigner); - vm.expectRevert("Only security council is allowed to call this function"); + 
vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomSigner)); IGovernance.Operation memory op = operationWithOneCallZeroSaltAndPredecessor(address(eventOnFallback), 0, ""); governance.executeInstant(op); } function test_RevertWhen_ExecutingInstantByOwner() public { vm.prank(owner); - vm.expectRevert("Only security council is allowed to call this function"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, owner)); IGovernance.Operation memory op = operationWithOneCallZeroSaltAndPredecessor(address(eventOnFallback), 0, ""); governance.executeInstant(op); } @@ -60,37 +61,37 @@ contract Authorization is GovernanceTest { function test_RevertWhen_UpdateDelayByUnauthorisedAddress() public { vm.prank(randomSigner); - vm.expectRevert("Only governance contract itself is allowed to call this function"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomSigner)); governance.updateDelay(0); } function test_RevertWhen_UpdateDelayByOwner() public { vm.prank(owner); - vm.expectRevert("Only governance contract itself is allowed to call this function"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, owner)); governance.updateDelay(0); } function test_RevertWhen_UpdateDelayBySecurityCouncil() public { vm.prank(securityCouncil); - vm.expectRevert("Only governance contract itself is allowed to call this function"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, securityCouncil)); governance.updateDelay(0); } function test_RevertWhen_UpdateSecurityCouncilByUnauthorisedAddress() public { vm.prank(randomSigner); - vm.expectRevert("Only governance contract itself is allowed to call this function"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, randomSigner)); governance.updateSecurityCouncil(address(0)); } function test_RevertWhen_UpdateSecurityCouncilByOwner() public { vm.prank(owner); - vm.expectRevert("Only governance contract itself is allowed to call this function"); + 
vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, owner)); governance.updateSecurityCouncil(address(0)); } function test_RevertWhen_UpdateSecurityCouncilBySecurityCouncil() public { vm.prank(securityCouncil); - vm.expectRevert("Only governance contract itself is allowed to call this function"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, securityCouncil)); governance.updateSecurityCouncil(address(0)); } } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Governance/ChainAdmin.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/ChainAdmin.t.sol new file mode 100644 index 000000000..27624d503 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Governance/ChainAdmin.t.sol @@ -0,0 +1,177 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; + +import "@openzeppelin/contracts-v4/utils/Strings.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {IChainAdmin} from "contracts/governance/IChainAdmin.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {Call} from "contracts/governance/Common.sol"; +import {NoCallsProvided, RestrictionWasAlreadyPresent, RestrictionWasNotPresent, AccessToFallbackDenied, AccessToFunctionDenied} from "contracts/common/L1ContractErrors.sol"; +import {Utils} from "test/foundry/l1/unit/concrete/Utils/Utils.sol"; + +contract ChainAdminTest is Test { + ChainAdmin internal chainAdmin; + AccessControlRestriction internal restriction; + GettersFacet internal gettersFacet; + + address internal owner; + uint32 internal major; + uint32 internal minor; + uint32 internal patch; + bytes32 public constant DEFAULT_ADMIN_ROLE = 0x00; + + function setUp() public { + owner = makeAddr("random address"); + + restriction = new AccessControlRestriction(0, owner); + address[] memory 
restrictions = new address[](1); + restrictions[0] = address(restriction); + + chainAdmin = new ChainAdmin(restrictions); + + gettersFacet = new GettersFacet(); + } + + function test_getRestrictions() public { + address[] memory restrictions = chainAdmin.getRestrictions(); + assertEq(restrictions[0], address(restriction)); + } + + function test_isRestrictionActive() public { + bool isActive = chainAdmin.isRestrictionActive(address(restriction)); + assertEq(isActive, true); + } + + function test_addRestriction() public { + address[] memory restrictions = chainAdmin.getRestrictions(); + + vm.expectEmit(true, false, false, true); + emit IChainAdmin.RestrictionAdded(owner); + + vm.prank(address(chainAdmin)); + chainAdmin.addRestriction(owner); + } + + function test_addRestrictionRevert() public { + vm.startPrank(address(chainAdmin)); + chainAdmin.addRestriction(owner); + + vm.expectRevert(abi.encodeWithSelector(RestrictionWasAlreadyPresent.selector, owner)); + chainAdmin.addRestriction(owner); + vm.stopPrank(); + } + + function test_removeRestriction() public { + address[] memory restrictions = chainAdmin.getRestrictions(); + + vm.startPrank(address(chainAdmin)); + chainAdmin.addRestriction(owner); + + vm.expectEmit(true, false, false, true); + emit IChainAdmin.RestrictionRemoved(owner); + + chainAdmin.removeRestriction(owner); + vm.stopPrank(); + } + + function test_removeRestrictionRevert() public { + address[] memory restrictions = chainAdmin.getRestrictions(); + + vm.startPrank(address(chainAdmin)); + chainAdmin.addRestriction(owner); + chainAdmin.removeRestriction(owner); + + vm.expectRevert(abi.encodeWithSelector(RestrictionWasNotPresent.selector, owner)); + chainAdmin.removeRestriction(owner); + vm.stopPrank(); + } + + function test_setUpgradeTimestamp(uint256 semverMinorVersionMultiplier, uint256 timestamp) public { + (major, minor, patch) = gettersFacet.getSemverProtocolVersion(); + uint256 protocolVersion = packSemver(major, minor, patch + 1, 
semverMinorVersionMultiplier); + + vm.expectEmit(true, false, false, true); + emit IChainAdmin.UpdateUpgradeTimestamp(protocolVersion, timestamp); + + vm.prank(address(chainAdmin)); + chainAdmin.setUpgradeTimestamp(protocolVersion, timestamp); + } + + function test_multicallRevertNoCalls() public { + Call[] memory calls = new Call[](0); + + vm.expectRevert(NoCallsProvided.selector); + chainAdmin.multicall(calls, false); + } + + function test_multicallRevertFailedCall() public { + Call[] memory calls = new Call[](1); + calls[0] = Call({target: address(chainAdmin), value: 0, data: abi.encodeCall(gettersFacet.getAdmin, ())}); + + vm.expectRevert(); + vm.prank(owner); + chainAdmin.multicall(calls, true); + } + + function test_validateCallAccessToFunctionDenied(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + Call[] memory calls = new Call[](2); + calls[0] = Call({target: address(gettersFacet), value: 0, data: abi.encodeCall(gettersFacet.getAdmin, ())}); + calls[1] = Call({target: address(gettersFacet), value: 0, data: abi.encodeCall(gettersFacet.getVerifier, ())}); + + vm.prank(owner); + restriction.setRequiredRoleForCall(address(gettersFacet), gettersFacet.getAdmin.selector, role); + + vm.expectRevert( + abi.encodeWithSelector( + AccessToFunctionDenied.selector, + address(gettersFacet), + gettersFacet.getAdmin.selector, + owner + ) + ); + vm.prank(owner); + chainAdmin.multicall(calls, true); + } + + function test_validateCallAccessToFallbackDenied(bytes32 role) public { + vm.assume(role != DEFAULT_ADMIN_ROLE); + + Call[] memory calls = new Call[](2); + calls[0] = Call({target: address(gettersFacet), value: 0, data: ""}); + calls[1] = Call({target: address(gettersFacet), value: 0, data: abi.encodeCall(gettersFacet.getVerifier, ())}); + + vm.prank(owner); + restriction.setRequiredRoleForFallback(address(gettersFacet), role); + + vm.expectRevert(abi.encodeWithSelector(AccessToFallbackDenied.selector, address(gettersFacet), owner)); + vm.prank(owner); + 
chainAdmin.multicall(calls, true); + } + + function test_multicall() public { + Call[] memory calls = new Call[](2); + calls[0] = Call({target: address(gettersFacet), value: 0, data: abi.encodeCall(gettersFacet.getAdmin, ())}); + calls[1] = Call({target: address(gettersFacet), value: 0, data: abi.encodeCall(gettersFacet.getVerifier, ())}); + + vm.prank(owner); + chainAdmin.multicall(calls, true); + } + + function packSemver( + uint32 major, + uint32 minor, + uint32 patch, + uint256 semverMinorVersionMultiplier + ) public returns (uint256) { + if (major != 0) { + revert("Major version must be 0"); + } + + return minor * semverMinorVersionMultiplier + patch; + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Governance/Executing.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/Executing.t.sol similarity index 90% rename from l1-contracts/test/foundry/unit/concrete/Governance/Executing.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Governance/Executing.t.sol index 160cee2f6..9a1e5eeb2 100644 --- a/l1-contracts/test/foundry/unit/concrete/Governance/Executing.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Governance/Executing.t.sol @@ -7,6 +7,7 @@ import {Utils} from "../Utils/Utils.sol"; import {GovernanceTest} from "./_Governance_Shared.t.sol"; import {IGovernance} from "contracts/governance/IGovernance.sol"; +import {OperationMustBeReady, OperationMustBePending, OperationExists, PreviousOperationNotExecuted, InvalidDelay} from "contracts/common/L1ContractErrors.sol"; contract ExecutingTest is GovernanceTest { using stdStorage for StdStorage; @@ -51,7 +52,7 @@ contract ExecutingTest is GovernanceTest { vm.startPrank(owner); IGovernance.Operation memory op = operationWithOneCallZeroSaltAndPredecessor(address(eventOnFallback), 0, ""); governance.scheduleTransparent(op, 10000); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(op); } @@ -65,7 +66,7 
@@ contract ExecutingTest is GovernanceTest { governance.scheduleTransparent(validOp, 0); IGovernance.Operation memory invalidOp = operationWithOneCallZeroSaltAndPredecessor(address(0), 0, ""); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(invalidOp); } @@ -83,7 +84,7 @@ contract ExecutingTest is GovernanceTest { 1, "" ); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(invalidOp); } @@ -101,7 +102,7 @@ contract ExecutingTest is GovernanceTest { 0, "00" ); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(invalidOp); } @@ -133,7 +134,7 @@ contract ExecutingTest is GovernanceTest { invalidOp.predecessor = governance.hashOperation(executedOp); // Failed to execute operation that wasn't scheduled - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(invalidOp); } @@ -152,7 +153,7 @@ contract ExecutingTest is GovernanceTest { "" ); invalidOp.salt = Utils.randomBytes32("wrongSalt"); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(invalidOp); } @@ -166,7 +167,7 @@ contract ExecutingTest is GovernanceTest { ); invalidOp.predecessor = Utils.randomBytes32("randomPredecessor"); governance.scheduleTransparent(invalidOp, 0); - vm.expectRevert("Predecessor operation not completed"); + vm.expectRevert(PreviousOperationNotExecuted.selector); governance.execute(invalidOp); } @@ -181,7 +182,7 @@ contract ExecutingTest is GovernanceTest { governance.scheduleTransparent(op, 0); executeOpAndCheck(op); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(op); } @@ -193,7 +194,7 @@ contract ExecutingTest 
is GovernanceTest { 0, "1122" ); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(op); } @@ -205,7 +206,7 @@ contract ExecutingTest is GovernanceTest { 0, "1122" ); - vm.expectRevert("Operation must be pending before execution"); + vm.expectRevert(OperationMustBePending.selector); governance.executeInstant(op); } @@ -219,7 +220,7 @@ contract ExecutingTest is GovernanceTest { ); governance.scheduleTransparent(op, 0); governance.cancel(governance.hashOperation(op)); - vm.expectRevert("Operation must be ready before execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(op); } @@ -247,7 +248,7 @@ contract ExecutingTest is GovernanceTest { ); governance.scheduleTransparent(op, 0); executeOpAndCheck(op); - vm.expectRevert("Operation with this proposal id already exists"); + vm.expectRevert(OperationExists.selector); governance.scheduleTransparent(op, 0); } @@ -270,7 +271,7 @@ contract ExecutingTest is GovernanceTest { function test_RevertWhen_CancelNonExistingOperation() public { vm.startPrank(owner); - vm.expectRevert("Operation must be pending"); + vm.expectRevert(OperationMustBePending.selector); governance.cancel(bytes32(0)); } @@ -279,7 +280,7 @@ contract ExecutingTest is GovernanceTest { stdstore.target(address(governance)).sig(governance.minDelay.selector).checked_write(1000); IGovernance.Operation memory op = operationWithOneCallZeroSaltAndPredecessor(address(revertFallback), 0, ""); - vm.expectRevert("Proposed delay is less than minimum delay"); + vm.expectRevert(InvalidDelay.selector); governance.scheduleTransparent(op, 0); } } diff --git a/l1-contracts/test/foundry/unit/concrete/Governance/Fallback.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/Fallback.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/Governance/Fallback.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Governance/Fallback.t.sol 
diff --git a/l1-contracts/test/foundry/unit/concrete/Governance/OperationStatus.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/OperationStatus.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/Governance/OperationStatus.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Governance/OperationStatus.t.sol diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Governance/PermanentRestriction.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/PermanentRestriction.t.sol new file mode 100644 index 000000000..bcfe6ae2c --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Governance/PermanentRestriction.t.sol @@ -0,0 +1,389 @@ +pragma solidity 0.8.24; + +import "@openzeppelin/contracts-v4/utils/Strings.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {L2TransactionRequestTwoBridgesOuter, BridgehubBurnCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {ChainTypeManager} from "contracts/state-transition/ChainTypeManager.sol"; +import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; +import {PermanentRestriction, MIN_GAS_FOR_FALLABLE_CALL} from "contracts/governance/PermanentRestriction.sol"; +import {IPermanentRestriction} from "contracts/governance/IPermanentRestriction.sol"; +import {NotAllowed, NotEnoughGas, InvalidAddress, UnsupportedEncodingVersion, InvalidSelector, NotBridgehub, ZeroAddress, ChainZeroAddress, NotAnAdmin, UnallowedImplementation, RemovingPermanentRestriction, CallNotAllowed} from "contracts/common/L1ContractErrors.sol"; +import {Call} from "contracts/governance/Common.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; +import {VerifierParams, FeeParams, PubdataPricingMode} from 
"contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {AccessControlRestriction} from "contracts/governance/AccessControlRestriction.sol"; +import {ChainAdmin} from "contracts/governance/ChainAdmin.sol"; +import {IChainAdmin} from "contracts/governance/IChainAdmin.sol"; +import {ChainTypeManagerTest} from "test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/_ChainTypeManager_Shared.t.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; + +contract PermanentRestrictionTest is ChainTypeManagerTest { + ChainAdmin internal chainAdmin; + AccessControlRestriction internal restriction; + PermanentRestriction internal permRestriction; + + address constant L2_FACTORY_ADDR = address(0); + + address internal owner; + address internal hyperchain; + + function setUp() public { + deploy(); + + createNewChainBridgehub(); + + owner = makeAddr("owner"); + hyperchain = chainContractAddress.getHyperchain(chainId); + (permRestriction, ) = _deployPermRestriction(bridgehub, L2_FACTORY_ADDR, owner); + restriction = new AccessControlRestriction(0, owner); + address[] memory restrictions = new address[](1); + restrictions[0] = address(restriction); + chainAdmin = new ChainAdmin(restrictions); + } + + function _deployPermRestriction( + IBridgehub _bridgehub, + address _l2AdminFactory, + address _owner + ) internal returns (PermanentRestriction 
proxy, PermanentRestriction impl) { + impl = new PermanentRestriction(_bridgehub, _l2AdminFactory); + TransparentUpgradeableProxy tup = new TransparentUpgradeableProxy( + address(impl), + address(uint160(1)), + abi.encodeCall(PermanentRestriction.initialize, (_owner)) + ); + + proxy = PermanentRestriction(address(tup)); + } + + function test_ownerAsAddressZero() public { + PermanentRestriction impl = new PermanentRestriction(bridgehub, L2_FACTORY_ADDR); + vm.expectRevert(ZeroAddress.selector); + new TransparentUpgradeableProxy( + address(impl), + address(uint160(1)), + abi.encodeCall(PermanentRestriction.initialize, (address(0))) + ); + } + + function test_allowAdminImplementation(bytes32 implementationHash) public { + vm.expectEmit(true, false, false, true); + emit IPermanentRestriction.AdminImplementationAllowed(implementationHash, true); + + vm.prank(owner); + permRestriction.allowAdminImplementation(implementationHash, true); + } + + function test_setAllowedData(bytes memory data) public { + vm.expectEmit(false, false, false, true); + emit IPermanentRestriction.AllowedDataChanged(data, true); + + vm.prank(owner); + permRestriction.setAllowedData(data, true); + } + + function test_setSelectorIsValidated(bytes4 selector) public { + vm.expectEmit(true, false, false, true); + emit IPermanentRestriction.SelectorValidationChanged(selector, true); + + vm.prank(owner); + permRestriction.setSelectorIsValidated(selector, true); + } + + function test_tryCompareAdminOfAChainIsAddressZero() public { + vm.expectRevert(ChainZeroAddress.selector); + permRestriction.tryCompareAdminOfAChain(address(0), owner); + } + + function test_tryCompareAdminOfAChainNotAHyperchain() public { + vm.expectRevert(); + permRestriction.tryCompareAdminOfAChain(makeAddr("random"), owner); + } + + function test_tryCompareAdminOfAChainNotAnAdmin() public { + vm.expectRevert(abi.encodeWithSelector(NotAnAdmin.selector, IZKChain(hyperchain).getAdmin(), owner)); + 
permRestriction.tryCompareAdminOfAChain(hyperchain, owner); + } + + function test_tryCompareAdminOfAChain() public { + permRestriction.tryCompareAdminOfAChain(hyperchain, newChainAdmin); + } + + function test_validateCallTooShortData() public { + Call memory call = Call({target: hyperchain, value: 0, data: ""}); + + vm.startPrank(newChainAdmin); + permRestriction.validateCall(call, owner); + vm.stopPrank(); + } + + function test_validateCallSetPendingAdminUnallowedImplementation() public { + Call memory call = Call({ + target: hyperchain, + value: 0, + data: abi.encodeWithSelector(IAdmin.setPendingAdmin.selector, owner) + }); + + vm.expectRevert(abi.encodeWithSelector(UnallowedImplementation.selector, owner.codehash)); + + vm.startPrank(newChainAdmin); + permRestriction.validateCall(call, owner); + vm.stopPrank(); + } + + function test_validateCallSetPendingAdminRemovingPermanentRestriction() public { + vm.prank(owner); + permRestriction.allowAdminImplementation(address(chainAdmin).codehash, true); + + Call memory call = Call({ + target: hyperchain, + value: 0, + data: abi.encodeWithSelector(IAdmin.setPendingAdmin.selector, address(chainAdmin)) + }); + + vm.expectRevert(RemovingPermanentRestriction.selector); + + vm.startPrank(newChainAdmin); + permRestriction.validateCall(call, owner); + vm.stopPrank(); + } + + function test_validateCallSetPendingAdmin() public { + vm.prank(owner); + permRestriction.allowAdminImplementation(address(chainAdmin).codehash, true); + + vm.prank(address(chainAdmin)); + chainAdmin.addRestriction(address(permRestriction)); + + Call memory call = Call({ + target: hyperchain, + value: 0, + data: abi.encodeWithSelector(IAdmin.setPendingAdmin.selector, address(chainAdmin)) + }); + + vm.startPrank(newChainAdmin); + permRestriction.validateCall(call, owner); + vm.stopPrank(); + } + + function test_validateCallNotValidatedSelector() public { + Call memory call = Call({ + target: hyperchain, + value: 0, + data: 
abi.encodeWithSelector(IAdmin.acceptAdmin.selector) + }); + + vm.startPrank(newChainAdmin); + permRestriction.validateCall(call, owner); + vm.stopPrank(); + } + + function test_validateCallCallNotAllowed() public { + vm.prank(owner); + permRestriction.setSelectorIsValidated(IAdmin.acceptAdmin.selector, true); + Call memory call = Call({ + target: hyperchain, + value: 0, + data: abi.encodeWithSelector(IAdmin.acceptAdmin.selector) + }); + + vm.expectRevert(abi.encodeWithSelector(CallNotAllowed.selector, call.data)); + + vm.startPrank(newChainAdmin); + permRestriction.validateCall(call, owner); + vm.stopPrank(); + } + + function test_validateCall() public { + vm.prank(owner); + permRestriction.setSelectorIsValidated(IAdmin.acceptAdmin.selector, true); + Call memory call = Call({ + target: hyperchain, + value: 0, + data: abi.encodeWithSelector(IAdmin.acceptAdmin.selector) + }); + + vm.prank(owner); + permRestriction.setAllowedData(call.data, true); + + vm.startPrank(newChainAdmin); + permRestriction.validateCall(call, owner); + vm.stopPrank(); + } + + function _encodeMigraationCall( + bool correctTarget, + bool correctSelector, + bool correctSecondBridge, + bool correctEncodingVersion, + bool correctAssetId, + address l2Admin + ) internal returns (Call memory call) { + if (!correctTarget) { + call.target = address(0); + return call; + } + call.target = address(bridgehub); + + if (!correctSelector) { + call.data = hex"00000000"; + return call; + } + + L2TransactionRequestTwoBridgesOuter memory outer = L2TransactionRequestTwoBridgesOuter({ + chainId: chainId, + mintValue: 0, + l2Value: 0, + l2GasLimit: 0, + l2GasPerPubdataByteLimit: 0, + refundRecipient: address(0), + secondBridgeAddress: address(0), + secondBridgeValue: 0, + secondBridgeCalldata: hex"" + }); + if (!correctSecondBridge) { + call.data = abi.encodeCall(Bridgehub.requestL2TransactionTwoBridges, (outer)); + // 0 is not correct second bridge + return call; + } + outer.secondBridgeAddress = sharedBridge; + + 
uint8 encoding = correctEncodingVersion ? 1 : 12; + + bytes32 chainAssetId = correctAssetId ? bridgehub.ctmAssetIdFromChainId(chainId) : bytes32(0); + + bytes memory bridgehubData = abi.encode( + BridgehubBurnCTMAssetData({ + // Gateway chain id, we do not need it + chainId: 0, + ctmData: abi.encode(l2Admin, hex""), + chainData: abi.encode(IZKChain(IBridgehub(bridgehub).getZKChain(chainId)).getProtocolVersion()) + }) + ); + outer.secondBridgeCalldata = abi.encodePacked(bytes1(encoding), abi.encode(chainAssetId, bridgehubData)); + + call.data = abi.encodeCall(Bridgehub.requestL2TransactionTwoBridges, (outer)); + } + + function test_tryGetNewAdminFromMigrationRevertWhenInvalidSelector() public { + Call memory call = _encodeMigraationCall(false, true, true, true, true, address(0)); + + vm.expectRevert(abi.encodeWithSelector(NotBridgehub.selector, address(0))); + permRestriction.tryGetNewAdminFromMigration(call); + } + + function test_tryGetNewAdminFromMigrationRevertWhenNotBridgehub() public { + Call memory call = _encodeMigraationCall(true, false, true, true, true, address(0)); + + vm.expectRevert(abi.encodeWithSelector(InvalidSelector.selector, bytes4(0))); + permRestriction.tryGetNewAdminFromMigration(call); + } + + function test_tryGetNewAdminFromMigrationRevertWhenNotSharedBridge() public { + Call memory call = _encodeMigraationCall(true, true, false, true, true, address(0)); + + vm.expectRevert(abi.encodeWithSelector(InvalidAddress.selector, address(sharedBridge), address(0))); + permRestriction.tryGetNewAdminFromMigration(call); + } + + function test_tryGetNewAdminFromMigrationRevertWhenIncorrectEncoding() public { + Call memory call = _encodeMigraationCall(true, true, true, false, true, address(0)); + + vm.expectRevert(abi.encodeWithSelector(UnsupportedEncodingVersion.selector)); + permRestriction.tryGetNewAdminFromMigration(call); + } + + function test_tryGetNewAdminFromMigrationRevertWhenIncorrectAssetId() public { + Call memory call = 
_encodeMigraationCall(true, true, true, true, false, address(0)); + + vm.expectRevert(abi.encodeWithSelector(ZeroAddress.selector)); + permRestriction.tryGetNewAdminFromMigration(call); + } + + function test_tryGetNewAdminFromMigrationShouldWorkCorrectly() public { + address l2Addr = makeAddr("l2Addr"); + Call memory call = _encodeMigraationCall(true, true, true, true, true, l2Addr); + + address result = permRestriction.tryGetNewAdminFromMigration(call); + assertEq(result, l2Addr); + } + + function test_validateMigrationToL2RevertNotAllowed() public { + Call memory call = _encodeMigraationCall(true, true, true, true, true, address(0)); + + vm.expectRevert(abi.encodeWithSelector(NotAllowed.selector, address(0))); + permRestriction.validateCall(call, owner); + } + + function test_validateMigrationToL2() public { + address expectedAddress = L2ContractHelper.computeCreate2Address( + L2_FACTORY_ADDR, + bytes32(0), + bytes32(0), + bytes32(0) + ); + + vm.expectEmit(true, false, false, true); + emit IPermanentRestriction.AllowL2Admin(expectedAddress); + permRestriction.allowL2Admin(bytes32(0), bytes32(0), bytes32(0)); + + Call memory call = _encodeMigraationCall(true, true, true, true, true, expectedAddress); + + // Should not fail + permRestriction.validateCall(call, owner); + } + + function test_validateNotEnoughGas() public { + address l2Addr = makeAddr("l2Addr"); + Call memory call = _encodeMigraationCall(true, true, true, true, true, l2Addr); + + vm.expectRevert(abi.encodeWithSelector(NotEnoughGas.selector)); + permRestriction.validateCall{gas: MIN_GAS_FOR_FALLABLE_CALL}(call, address(0)); + } + + function createNewChainBridgehub() internal { + bytes[] memory factoryDeps = new bytes[](0); + vm.stopPrank(); + vm.startPrank(governor); + bridgehub.addChainTypeManager(address(chainContractAddress)); + bridgehub.addTokenAssetId(DataEncoding.encodeNTVAssetId(block.chainid, baseToken)); + bridgehub.setAddresses(sharedBridge, ICTMDeploymentTracker(address(0)), new 
MessageRoot(bridgehub)); + vm.stopPrank(); + + // ctm deployer address is 0 in this test + vm.startPrank(address(0)); + bridgehub.setAssetHandlerAddress( + bytes32(uint256(uint160(address(chainContractAddress)))), + address(chainContractAddress) + ); + vm.stopPrank(); + + address l1Nullifier = makeAddr("l1Nullifier"); + vm.mockCall( + address(sharedBridge), + abi.encodeWithSelector(IL1AssetRouter.L1_NULLIFIER.selector), + abi.encode(l1Nullifier) + ); + vm.startPrank(governor); + bridgehub.createNewChain({ + _chainId: chainId, + _chainTypeManager: address(chainContractAddress), + _baseTokenAssetId: DataEncoding.encodeNTVAssetId(block.chainid, baseToken), + _salt: 0, + _admin: newChainAdmin, + _initData: getCTMInitData(), + _factoryDeps: factoryDeps + }); + vm.stopPrank(); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Governance/Reentrancy.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/Reentrancy.t.sol similarity index 95% rename from l1-contracts/test/foundry/unit/concrete/Governance/Reentrancy.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Governance/Reentrancy.t.sol index 1076d1015..1f6beb10a 100644 --- a/l1-contracts/test/foundry/unit/concrete/Governance/Reentrancy.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Governance/Reentrancy.t.sol @@ -9,6 +9,7 @@ import {GovernanceTest} from "./_Governance_Shared.t.sol"; import {IGovernance} from "contracts/governance/IGovernance.sol"; import {ReenterGovernance} from "contracts/dev-contracts/test/ReenterGovernance.sol"; +import {OperationMustBeReady, OperationMustBePending} from "contracts/common/L1ContractErrors.sol"; contract ReentrancyTest is GovernanceTest { using stdStorage for StdStorage; @@ -88,7 +89,7 @@ contract ReentrancyTest is GovernanceTest { vm.startPrank(address(reenterGovernance)); governance.scheduleTransparent(op, 0); - vm.expectRevert("Operation must be ready after execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(op); } @@ 
-108,7 +109,7 @@ contract ReentrancyTest is GovernanceTest { vm.startPrank(address(reenterGovernance)); governance.scheduleTransparent(op, 0); - vm.expectRevert("Operation must be pending after execution"); + vm.expectRevert(OperationMustBePending.selector); governance.executeInstant(op); } @@ -125,7 +126,7 @@ contract ReentrancyTest is GovernanceTest { vm.startPrank(address(reenterGovernance)); governance.scheduleTransparent(op, 0); - vm.expectRevert("Operation must be ready after execution"); + vm.expectRevert(OperationMustBeReady.selector); governance.execute(op); } @@ -145,7 +146,7 @@ contract ReentrancyTest is GovernanceTest { vm.startPrank(address(reenterGovernance)); governance.scheduleTransparent(op, 0); - vm.expectRevert("Operation must be pending after execution"); + vm.expectRevert(OperationMustBePending.selector); governance.executeInstant(op); } } diff --git a/l1-contracts/test/foundry/unit/concrete/Governance/SelfUpgrades.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/SelfUpgrades.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/Governance/SelfUpgrades.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Governance/SelfUpgrades.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/Governance/_Governance_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Governance/_Governance_Shared.t.sol similarity index 93% rename from l1-contracts/test/foundry/unit/concrete/Governance/_Governance_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Governance/_Governance_Shared.t.sol index e7f499254..2a34bc2ff 100644 --- a/l1-contracts/test/foundry/unit/concrete/Governance/_Governance_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Governance/_Governance_Shared.t.sol @@ -6,6 +6,7 @@ import {Test} from "forge-std/Test.sol"; import {Governance} from "contracts/governance/Governance.sol"; import {IGovernance} from "contracts/governance/IGovernance.sol"; +import {Call} from 
"contracts/governance/Common.sol"; import {EventOnFallback} from "contracts/dev-contracts/EventOnFallback.sol"; import {Forwarder} from "contracts/dev-contracts/Forwarder.sol"; import {RevertFallback} from "contracts/dev-contracts/RevertFallback.sol"; @@ -58,8 +59,8 @@ contract GovernanceTest is Test, EventOnFallback { uint256 _value, bytes memory _data ) internal pure returns (IGovernance.Operation memory) { - IGovernance.Call[] memory calls = new IGovernance.Call[](1); - calls[0] = IGovernance.Call({target: _target, value: _value, data: _data}); + Call[] memory calls = new Call[](1); + calls[0] = Call({target: _target, value: _value, data: _data}); return IGovernance.Operation({calls: calls, salt: bytes32(0), predecessor: bytes32(0)}); } diff --git a/l1-contracts/test/foundry/unit/concrete/Utils/Utils.sol b/l1-contracts/test/foundry/l1/unit/concrete/Utils/Utils.sol similarity index 63% rename from l1-contracts/test/foundry/unit/concrete/Utils/Utils.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Utils/Utils.sol index b52d1e122..eb8919c83 100644 --- a/l1-contracts/test/foundry/unit/concrete/Utils/Utils.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Utils/Utils.sol @@ -4,6 +4,7 @@ pragma solidity 0.8.24; import {UtilsFacet} from "../Utils/UtilsFacet.sol"; +import "forge-std/console.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; @@ -11,18 +12,21 @@ import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; -import {IVerifier, VerifierParams} from 
"contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; -import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; +import {IVerifier, VerifierParams} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; import {InitializeData, InitializeDataNewChain} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; import {IExecutor, SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; import {L2CanonicalTransaction} from "contracts/common/Messaging.sol"; +import {DummyBridgehub} from "contracts/dev-contracts/test/DummyBridgehub.sol"; +import {PriorityOpsBatchInfo} from "contracts/state-transition/libraries/PriorityTree.sol"; bytes32 constant DEFAULT_L2_LOGS_TREE_ROOT_HASH = 0x0000000000000000000000000000000000000000000000000000000000000000; address constant L2_SYSTEM_CONTEXT_ADDRESS = 0x000000000000000000000000000000000000800B; address constant L2_BOOTLOADER_ADDRESS = 0x0000000000000000000000000000000000008001; address constant L2_KNOWN_CODE_STORAGE_ADDRESS = 0x0000000000000000000000000000000000008004; address constant L2_TO_L1_MESSENGER = 0x0000000000000000000000000000000000008008; -address constant PUBDATA_PUBLISHER_ADDRESS = 0x0000000000000000000000000000000000008011; +// constant in tests, but can be arbitrary address in real environments +address constant L2_DA_VALIDATOR_ADDRESS = 0x2f3Bc0cB46C9780990afbf86A60bdf6439DE991C; uint256 constant MAX_NUMBER_OF_BLOBS = 6; uint256 constant TOTAL_BLOBS_IN_COMMITMENT = 16; @@ -55,8 +59,8 @@ library Utils { return abi.encodePacked(servicePrefix, bytes2(0x0000), sender, key, value); } - function createSystemLogs() public pure returns (bytes[] memory) { - bytes[] memory logs = new bytes[](13); + function createSystemLogs(bytes32 _outputHash) public returns (bytes[] memory) { + bytes[] memory logs = new 
bytes[](7); logs[0] = constructL2Log( true, L2_TO_L1_MESSENGER, @@ -64,68 +68,90 @@ library Utils { bytes32("") ); logs[1] = constructL2Log( - true, - L2_TO_L1_MESSENGER, - uint256(SystemLogKey.TOTAL_L2_TO_L1_PUBDATA_KEY), - 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563 - ); - logs[2] = constructL2Log(true, L2_TO_L1_MESSENGER, uint256(SystemLogKey.STATE_DIFF_HASH_KEY), bytes32("")); - logs[3] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), bytes32("") ); - logs[4] = constructL2Log( + logs[2] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, uint256(SystemLogKey.PREV_BATCH_HASH_KEY), bytes32("") ); - logs[5] = constructL2Log( + logs[3] = constructL2Log( true, L2_BOOTLOADER_ADDRESS, uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY), keccak256("") ); - logs[6] = constructL2Log( + logs[4] = constructL2Log( true, L2_BOOTLOADER_ADDRESS, uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY), bytes32("") ); - logs[7] = constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_ONE_HASH_KEY), bytes32(0)); - logs[8] = constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_TWO_HASH_KEY), bytes32(0)); - logs[9] = constructL2Log( + + logs[5] = constructL2Log( true, - PUBDATA_PUBLISHER_ADDRESS, - uint256(SystemLogKey.BLOB_THREE_HASH_KEY), - bytes32(0) + L2_TO_L1_MESSENGER, + uint256(SystemLogKey.L2_DA_VALIDATOR_OUTPUT_HASH_KEY), + _outputHash ); - logs[10] = constructL2Log( + logs[6] = constructL2Log( true, - PUBDATA_PUBLISHER_ADDRESS, - uint256(SystemLogKey.BLOB_FOUR_HASH_KEY), - bytes32(0) + L2_TO_L1_MESSENGER, + uint256(SystemLogKey.USED_L2_DA_VALIDATOR_ADDRESS_KEY), + bytes32(uint256(uint160(L2_DA_VALIDATOR_ADDRESS))) ); - logs[11] = constructL2Log( + + return logs; + } + + function createSystemLogsWithEmptyDAValidator() public returns (bytes[] memory) { + bytes[] memory systemLogs = createSystemLogs(bytes32(0)); + 
systemLogs[uint256(SystemLogKey.USED_L2_DA_VALIDATOR_ADDRESS_KEY)] = constructL2Log( true, - PUBDATA_PUBLISHER_ADDRESS, - uint256(SystemLogKey.BLOB_FIVE_HASH_KEY), - bytes32(0) + L2_TO_L1_MESSENGER, + uint256(SystemLogKey.USED_L2_DA_VALIDATOR_ADDRESS_KEY), + bytes32(uint256(0)) ); - logs[12] = constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_SIX_HASH_KEY), bytes32(0)); - return logs; + + return systemLogs; } function createSystemLogsWithUpgradeTransaction( bytes32 _expectedSystemContractUpgradeTxHash - ) public pure returns (bytes[] memory) { - bytes[] memory logsWithoutUpgradeTx = createSystemLogs(); + ) public returns (bytes[] memory) { + bytes[] memory logsWithoutUpgradeTx = createSystemLogs(bytes32(0)); + bytes[] memory logs = new bytes[](logsWithoutUpgradeTx.length + 1); + for (uint256 i = 0; i < logsWithoutUpgradeTx.length; i++) { + logs[i] = logsWithoutUpgradeTx[i]; + } + logs[logsWithoutUpgradeTx.length] = constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY), + _expectedSystemContractUpgradeTxHash + ); + return logs; + } + + function createSystemLogsWithUpgradeTransactionForCTM( + bytes32 _expectedSystemContractUpgradeTxHash, + bytes32 _outputHash + ) public returns (bytes[] memory) { + bytes[] memory logsWithoutUpgradeTx = createSystemLogs(_outputHash); bytes[] memory logs = new bytes[](logsWithoutUpgradeTx.length + 1); for (uint256 i = 0; i < logsWithoutUpgradeTx.length; i++) { logs[i] = logsWithoutUpgradeTx[i]; } + logs[uint256(SystemLogKey.PREV_BATCH_HASH_KEY)] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PREV_BATCH_HASH_KEY), + bytes32(uint256(0x01)) + ); logs[logsWithoutUpgradeTx.length] = constructL2Log( true, L2_BOOTLOADER_ADDRESS, @@ -161,18 +187,7 @@ library Utils { bootloaderHeapInitialContentsHash: randomBytes32("bootloaderHeapInitialContentsHash"), eventsQueueStateHash: randomBytes32("eventsQueueStateHash"), systemLogs: 
abi.encode(randomBytes32("systemLogs")), - pubdataCommitments: abi.encodePacked(uint256(0)) - }); - } - - function createProofInput() public pure returns (IExecutor.ProofInput memory) { - uint256[] memory recursiveAggregationInput; - uint256[] memory serializedProof; - - return - IExecutor.ProofInput({ - recursiveAggregationInput: recursiveAggregationInput, - serializedProof: serializedProof + operatorDAInput: abi.encodePacked(uint256(0)) }); } @@ -184,8 +199,42 @@ library Utils { return result; } + function encodeCommitBatchesData( + IExecutor.StoredBatchInfo memory _lastCommittedBatchData, + IExecutor.CommitBatchInfo[] memory _newBatchesData + ) internal pure returns (uint256, uint256, bytes memory) { + return ( + _newBatchesData[0].batchNumber, + _newBatchesData[_newBatchesData.length - 1].batchNumber, + bytes.concat(bytes1(0x00), abi.encode(_lastCommittedBatchData, _newBatchesData)) + ); + } + + function encodeProveBatchesData( + IExecutor.StoredBatchInfo memory _prevBatch, + IExecutor.StoredBatchInfo[] memory _committedBatches, + uint256[] memory _proof + ) internal pure returns (uint256, uint256, bytes memory) { + return ( + _committedBatches[0].batchNumber, + _committedBatches[_committedBatches.length - 1].batchNumber, + bytes.concat(bytes1(0x00), abi.encode(_prevBatch, _committedBatches, _proof)) + ); + } + + function encodeExecuteBatchesData( + IExecutor.StoredBatchInfo[] memory _batchesData, + PriorityOpsBatchInfo[] memory _priorityOpsData + ) internal pure returns (uint256, uint256, bytes memory) { + return ( + _batchesData[0].batchNumber, + _batchesData[_batchesData.length - 1].batchNumber, + bytes.concat(bytes1(0x00), abi.encode(_batchesData, _priorityOpsData)) + ); + } + function getAdminSelectors() public pure returns (bytes4[] memory) { - bytes4[] memory selectors = new bytes4[](11); + bytes4[] memory selectors = new bytes4[](13); selectors[0] = AdminFacet.setPendingAdmin.selector; selectors[1] = AdminFacet.acceptAdmin.selector; selectors[2] = 
AdminFacet.setValidator.selector; @@ -197,20 +246,22 @@ library Utils { selectors[8] = AdminFacet.executeUpgrade.selector; selectors[9] = AdminFacet.freezeDiamond.selector; selectors[10] = AdminFacet.unfreezeDiamond.selector; + selectors[11] = AdminFacet.genesisUpgrade.selector; + selectors[12] = AdminFacet.setDAValidatorPair.selector; return selectors; } function getExecutorSelectors() public pure returns (bytes4[] memory) { bytes4[] memory selectors = new bytes4[](4); - selectors[0] = ExecutorFacet.commitBatches.selector; - selectors[1] = ExecutorFacet.proveBatches.selector; - selectors[2] = ExecutorFacet.executeBatches.selector; - selectors[3] = ExecutorFacet.revertBatches.selector; + selectors[0] = ExecutorFacet.commitBatchesSharedBridge.selector; + selectors[1] = ExecutorFacet.proveBatchesSharedBridge.selector; + selectors[2] = ExecutorFacet.executeBatchesSharedBridge.selector; + selectors[3] = ExecutorFacet.revertBatchesSharedBridge.selector; return selectors; } function getGettersSelectors() public pure returns (bytes4[] memory) { - bytes4[] memory selectors = new bytes4[](29); + bytes4[] memory selectors = new bytes4[](31); selectors[0] = GettersFacet.getVerifier.selector; selectors[1] = GettersFacet.getAdmin.selector; selectors[2] = GettersFacet.getPendingAdmin.selector; @@ -220,7 +271,7 @@ library Utils { selectors[6] = GettersFacet.getTotalPriorityTxs.selector; selectors[7] = GettersFacet.getFirstUnprocessedPriorityTx.selector; selectors[8] = GettersFacet.getPriorityQueueSize.selector; - selectors[9] = GettersFacet.priorityQueueFrontOperation.selector; + selectors[9] = GettersFacet.getL2SystemContractsUpgradeTxHash.selector; selectors[10] = GettersFacet.isValidator.selector; selectors[11] = GettersFacet.l2LogsRootHash.selector; selectors[12] = GettersFacet.storedBatchHash.selector; @@ -239,7 +290,9 @@ library Utils { selectors[25] = GettersFacet.getTotalBatchesCommitted.selector; selectors[26] = GettersFacet.getTotalBatchesVerified.selector; 
selectors[27] = GettersFacet.getTotalBatchesExecuted.selector; - selectors[28] = GettersFacet.getL2SystemContractsUpgradeTxHash.selector; + selectors[28] = GettersFacet.getProtocolVersion.selector; + selectors[29] = GettersFacet.getPriorityTreeRoot.selector; + selectors[30] = GettersFacet.getChainId.selector; return selectors; } @@ -256,45 +309,47 @@ library Utils { } function getUtilsFacetSelectors() public pure returns (bytes4[] memory) { - bytes4[] memory selectors = new bytes4[](38); + bytes4[] memory selectors = new bytes4[](39); selectors[0] = UtilsFacet.util_setChainId.selector; selectors[1] = UtilsFacet.util_getChainId.selector; selectors[2] = UtilsFacet.util_setBridgehub.selector; selectors[3] = UtilsFacet.util_getBridgehub.selector; selectors[4] = UtilsFacet.util_setBaseToken.selector; - selectors[5] = UtilsFacet.util_getBaseToken.selector; - selectors[6] = UtilsFacet.util_setBaseTokenBridge.selector; - selectors[7] = UtilsFacet.util_getBaseTokenBridge.selector; - selectors[8] = UtilsFacet.util_setVerifier.selector; - selectors[9] = UtilsFacet.util_getVerifier.selector; - selectors[10] = UtilsFacet.util_setStoredBatchHashes.selector; - selectors[11] = UtilsFacet.util_getStoredBatchHashes.selector; - selectors[12] = UtilsFacet.util_setVerifierParams.selector; - selectors[13] = UtilsFacet.util_getVerifierParams.selector; - selectors[14] = UtilsFacet.util_setL2BootloaderBytecodeHash.selector; - selectors[15] = UtilsFacet.util_getL2BootloaderBytecodeHash.selector; - selectors[16] = UtilsFacet.util_setL2DefaultAccountBytecodeHash.selector; - selectors[17] = UtilsFacet.util_getL2DefaultAccountBytecodeHash.selector; - selectors[18] = UtilsFacet.util_setPendingAdmin.selector; - selectors[19] = UtilsFacet.util_getPendingAdmin.selector; - selectors[20] = UtilsFacet.util_setAdmin.selector; - selectors[21] = UtilsFacet.util_getAdmin.selector; - selectors[22] = UtilsFacet.util_setValidator.selector; - selectors[23] = UtilsFacet.util_getValidator.selector; - 
selectors[24] = UtilsFacet.util_setZkPorterAvailability.selector; - selectors[25] = UtilsFacet.util_getZkPorterAvailability.selector; - selectors[26] = UtilsFacet.util_setStateTransitionManager.selector; - selectors[27] = UtilsFacet.util_getStateTransitionManager.selector; - selectors[28] = UtilsFacet.util_setPriorityTxMaxGasLimit.selector; - selectors[29] = UtilsFacet.util_getPriorityTxMaxGasLimit.selector; - selectors[30] = UtilsFacet.util_setFeeParams.selector; - selectors[31] = UtilsFacet.util_getFeeParams.selector; - selectors[32] = UtilsFacet.util_setProtocolVersion.selector; - selectors[33] = UtilsFacet.util_getProtocolVersion.selector; - selectors[34] = UtilsFacet.util_setIsFrozen.selector; - selectors[35] = UtilsFacet.util_getIsFrozen.selector; - selectors[36] = UtilsFacet.util_setTransactionFilterer.selector; - selectors[37] = UtilsFacet.util_setBaseTokenGasPriceMultiplierDenominator.selector; + selectors[5] = UtilsFacet.util_getBaseTokenAssetId.selector; + selectors[6] = UtilsFacet.util_setVerifier.selector; + selectors[7] = UtilsFacet.util_getVerifier.selector; + selectors[8] = UtilsFacet.util_setStoredBatchHashes.selector; + selectors[9] = UtilsFacet.util_getStoredBatchHashes.selector; + selectors[10] = UtilsFacet.util_setVerifierParams.selector; + selectors[11] = UtilsFacet.util_getVerifierParams.selector; + selectors[12] = UtilsFacet.util_setL2BootloaderBytecodeHash.selector; + selectors[13] = UtilsFacet.util_getL2BootloaderBytecodeHash.selector; + selectors[14] = UtilsFacet.util_setL2DefaultAccountBytecodeHash.selector; + selectors[15] = UtilsFacet.util_getL2DefaultAccountBytecodeHash.selector; + selectors[16] = UtilsFacet.util_setPendingAdmin.selector; + selectors[17] = UtilsFacet.util_getPendingAdmin.selector; + selectors[18] = UtilsFacet.util_setAdmin.selector; + selectors[19] = UtilsFacet.util_getAdmin.selector; + selectors[20] = UtilsFacet.util_setValidator.selector; + selectors[21] = UtilsFacet.util_getValidator.selector; + selectors[22] = 
UtilsFacet.util_setZkPorterAvailability.selector; + selectors[23] = UtilsFacet.util_getZkPorterAvailability.selector; + selectors[24] = UtilsFacet.util_setChainTypeManager.selector; + selectors[25] = UtilsFacet.util_getChainTypeManager.selector; + selectors[26] = UtilsFacet.util_setPriorityTxMaxGasLimit.selector; + selectors[27] = UtilsFacet.util_getPriorityTxMaxGasLimit.selector; + selectors[28] = UtilsFacet.util_setFeeParams.selector; + selectors[29] = UtilsFacet.util_getFeeParams.selector; + selectors[30] = UtilsFacet.util_setProtocolVersion.selector; + selectors[31] = UtilsFacet.util_getProtocolVersion.selector; + selectors[32] = UtilsFacet.util_setIsFrozen.selector; + selectors[33] = UtilsFacet.util_getIsFrozen.selector; + selectors[34] = UtilsFacet.util_setTransactionFilterer.selector; + selectors[35] = UtilsFacet.util_setBaseTokenGasPriceMultiplierDenominator.selector; + selectors[36] = UtilsFacet.util_setTotalBatchesExecuted.selector; + selectors[37] = UtilsFacet.util_setL2LogsRootHash.selector; + selectors[38] = UtilsFacet.util_setBaseTokenGasPriceMultiplierNominator.selector; + return selectors; } @@ -319,19 +374,23 @@ library Utils { }); } - function makeInitializeData(address testnetVerifier) public pure returns (InitializeData memory) { + function makeInitializeData(address testnetVerifier) public returns (InitializeData memory) { + DummyBridgehub dummyBridgehub = new DummyBridgehub(); + return InitializeData({ chainId: 1, - bridgehub: address(0x876543567890), - stateTransitionManager: address(0x1234567890876543567890), + bridgehub: address(dummyBridgehub), + chainTypeManager: address(0x1234567890876543567890), protocolVersion: 0, admin: address(0x32149872498357874258787), validatorTimelock: address(0x85430237648403822345345), - baseToken: address(0x923645439232223445), - baseTokenBridge: address(0x23746765237749923040872834), + baseTokenAssetId: bytes32(uint256(0x923645439232223445)), storedBatchZero: bytes32(0), - verifier: 
makeVerifier(testnetVerifier), + dualVerifier: makeVerifier(testnetVerifier), + plonkVerifier: address(0x23432423445), + fflonkVerifier: address(0x24345211125), + fflonkProofLength: 0, verifierParams: makeVerifierParams(), l2BootloaderBytecodeHash: 0x0100000000000000000000000000000000000000000000000000000000000000, l2DefaultAccountBytecodeHash: 0x0100000000000000000000000000000000000000000000000000000000000000, @@ -346,7 +405,10 @@ library Utils { ) public pure returns (InitializeDataNewChain memory) { return InitializeDataNewChain({ - verifier: makeVerifier(testnetVerifier), + dualVerifier: makeVerifier(testnetVerifier), + plonkVerifier: address(0x23432423445), + fflonkVerifier: address(0x24345211125), + fflonkProofLength: 0, verifierParams: makeVerifierParams(), l2BootloaderBytecodeHash: 0x0100000000000000000000000000000000000000000000000000000000000000, l2DefaultAccountBytecodeHash: 0x0100000000000000000000000000000000000000000000000000000000000000, @@ -459,8 +521,8 @@ library Utils { ) internal pure returns (bytes32[] memory blobAuxOutputWords) { // These invariants should be checked by the caller of this function, but we double check // just in case. 
- require(_blobCommitments.length == MAX_NUMBER_OF_BLOBS, "b10"); - require(_blobHashes.length == MAX_NUMBER_OF_BLOBS, "b11"); + require(_blobCommitments.length == TOTAL_BLOBS_IN_COMMITMENT, "b10"); + require(_blobHashes.length == TOTAL_BLOBS_IN_COMMITMENT, "b11"); // for each blob we have: // linear hash (hash of preimage from system logs) and @@ -472,12 +534,62 @@ library Utils { blobAuxOutputWords = new bytes32[](2 * TOTAL_BLOBS_IN_COMMITMENT); - for (uint256 i = 0; i < MAX_NUMBER_OF_BLOBS; i++) { + for (uint256 i = 0; i < TOTAL_BLOBS_IN_COMMITMENT; i++) { blobAuxOutputWords[i * 2] = _blobHashes[i]; blobAuxOutputWords[i * 2 + 1] = _blobCommitments[i]; } } + function constructRollupL2DAValidatorOutputHash( + bytes32 _stateDiffHash, + bytes32 _totalPubdataHash, + uint8 _blobsAmount, + bytes32[] memory _blobHashes + ) public pure returns (bytes32) { + return keccak256(abi.encodePacked(_stateDiffHash, _totalPubdataHash, _blobsAmount, _blobHashes)); + } + + function getDefaultBlobCommitment() public pure returns (bytes memory) { + bytes16 blobOpeningPoint = 0x7142c5851421a2dc03dde0aabdb0ffdb; + bytes32 blobClaimedValue = 0x1e5eea3bbb85517461c1d1c7b84c7c2cec050662a5e81a71d5d7e2766eaff2f0; + bytes + memory commitment = hex"ad5a32c9486ad7ab553916b36b742ed89daffd4538d95f4fc8a6c5c07d11f4102e34b3c579d9b4eb6c295a78e484d3bf"; + bytes + memory blobProof = hex"b7565b1cf204d9f35cec98a582b8a15a1adff6d21f3a3a6eb6af5a91f0a385c069b34feb70bea141038dc7faca5ed364"; + + return abi.encodePacked(blobOpeningPoint, blobClaimedValue, commitment, blobProof); + } + + function defaultPointEvaluationPrecompileInput(bytes32 _versionedHash) public view returns (bytes memory) { + return + abi.encodePacked( + _versionedHash, + bytes32(uint256(uint128(0x7142c5851421a2dc03dde0aabdb0ffdb))), // opening point + abi.encodePacked( + bytes32(0x1e5eea3bbb85517461c1d1c7b84c7c2cec050662a5e81a71d5d7e2766eaff2f0), // claimed value + 
hex"ad5a32c9486ad7ab553916b36b742ed89daffd4538d95f4fc8a6c5c07d11f4102e34b3c579d9b4eb6c295a78e484d3bf", // commitment + hex"b7565b1cf204d9f35cec98a582b8a15a1adff6d21f3a3a6eb6af5a91f0a385c069b34feb70bea141038dc7faca5ed364" // proof + ) + ); + } + + function emptyData() internal pure returns (PriorityOpsBatchInfo[] calldata _empty) { + assembly { + _empty.offset := 0 + _empty.length := 0 + } + } + + function generatePriorityOps(uint256 len) internal pure returns (PriorityOpsBatchInfo[] memory _ops) { + _ops = new PriorityOpsBatchInfo[](len); + bytes32[] memory empty; + PriorityOpsBatchInfo memory info = PriorityOpsBatchInfo({leftPath: empty, rightPath: empty, itemHashes: empty}); + + for (uint256 i = 0; i < len; ++i) { + _ops[i] = info; + } + } + // add this to be excluded from coverage report function test() internal {} } diff --git a/l1-contracts/test/foundry/unit/concrete/Utils/Utils.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Utils/Utils.t.sol similarity index 66% rename from l1-contracts/test/foundry/unit/concrete/Utils/Utils.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Utils/Utils.t.sol index b7659295c..0c9ad684f 100644 --- a/l1-contracts/test/foundry/unit/concrete/Utils/Utils.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Utils/Utils.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils, L2_TO_L1_MESSENGER, L2_SYSTEM_CONTEXT_ADDRESS, L2_BOOTLOADER_ADDRESS, PUBDATA_PUBLISHER_ADDRESS} from "./Utils.sol"; +import {Utils, L2_TO_L1_MESSENGER, L2_SYSTEM_CONTEXT_ADDRESS, L2_BOOTLOADER_ADDRESS, L2_TO_L1_MESSENGER, L2_DA_VALIDATOR_ADDRESS} from "./Utils.sol"; import {SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; // solhint-enable max-line-length @@ -43,9 +43,9 @@ contract UtilsTest is Test { } function test_CreateSystemLogs() public { - bytes[] memory logs = Utils.createSystemLogs(); + bytes[] memory logs = Utils.createSystemLogs(bytes32(0)); - 
assertEq(logs.length, 13, "logs length should be correct"); + assertEq(logs.length, 7, "logs length should be correct"); assertEq( logs[0], @@ -60,104 +60,68 @@ contract UtilsTest is Test { assertEq( logs[1], - Utils.constructL2Log( - true, - L2_TO_L1_MESSENGER, - uint256(SystemLogKey.TOTAL_L2_TO_L1_PUBDATA_KEY), - 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563 - ), - "log[1] should be correct" - ); - - assertEq( - logs[2], - Utils.constructL2Log(true, L2_TO_L1_MESSENGER, uint256(SystemLogKey.STATE_DIFF_HASH_KEY), bytes32("")), - "log[2] should be correct" - ); - - assertEq( - logs[3], Utils.constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), bytes32("") ), - "log[3] should be correct" + "log[1] should be correct" ); assertEq( - logs[4], + logs[2], Utils.constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, uint256(SystemLogKey.PREV_BATCH_HASH_KEY), bytes32("") ), - "log[4] should be correct" + "log[2] should be correct" ); assertEq( - logs[5], + logs[3], Utils.constructL2Log( true, L2_BOOTLOADER_ADDRESS, uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY), keccak256("") ), - "log[5] should be correct" + "log[3] should be correct" ); assertEq( - logs[6], + logs[4], Utils.constructL2Log( true, L2_BOOTLOADER_ADDRESS, uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY), bytes32("") ), - "log[6] should be correct" - ); - - assertEq( - logs[7], - Utils.constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_ONE_HASH_KEY), bytes32(0)), - "log[7] should be correct" - ); - - assertEq( - logs[8], - Utils.constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_TWO_HASH_KEY), bytes32(0)), - "log[8] should be correct" + "log[4] should be correct" ); assertEq( - logs[9], + logs[5], Utils.constructL2Log( true, - PUBDATA_PUBLISHER_ADDRESS, - uint256(SystemLogKey.BLOB_THREE_HASH_KEY), + L2_TO_L1_MESSENGER, + uint256(SystemLogKey.L2_DA_VALIDATOR_OUTPUT_HASH_KEY), bytes32(0) ), 
- "log[9] should be correct" - ); - - assertEq( - logs[10], - Utils.constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_FOUR_HASH_KEY), bytes32(0)), - "log[8] should be correct" - ); - - assertEq( - logs[11], - Utils.constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_FIVE_HASH_KEY), bytes32(0)), - "log[11] should be correct" + "log[5] should be correct" ); assertEq( - logs[12], - Utils.constructL2Log(true, PUBDATA_PUBLISHER_ADDRESS, uint256(SystemLogKey.BLOB_SIX_HASH_KEY), bytes32(0)), - "log[12] should be correct" + logs[6], + Utils.constructL2Log( + true, + L2_TO_L1_MESSENGER, + uint256(SystemLogKey.USED_L2_DA_VALIDATOR_ADDRESS_KEY), + bytes32(uint256(uint160(L2_DA_VALIDATOR_ADDRESS))) + ), + "log[6] should be correct" ); } diff --git a/l1-contracts/test/foundry/unit/concrete/Utils/UtilsFacet.sol b/l1-contracts/test/foundry/l1/unit/concrete/Utils/UtilsFacet.sol similarity index 68% rename from l1-contracts/test/foundry/unit/concrete/Utils/UtilsFacet.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Utils/UtilsFacet.sol index 01864697d..2d6d63492 100644 --- a/l1-contracts/test/foundry/unit/concrete/Utils/UtilsFacet.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Utils/UtilsFacet.sol @@ -2,12 +2,12 @@ pragma solidity 0.8.24; -import {IVerifier, VerifierParams} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; -import {FeeParams} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; -import {ZkSyncHyperchainBase} from "contracts/state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol"; +import {IVerifier, VerifierParams} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {FeeParams} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {ZKChainBase} from "contracts/state-transition/chain-deps/facets/ZKChainBase.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -contract UtilsFacet is 
ZkSyncHyperchainBase { +contract UtilsFacet is ZKChainBase { function util_setChainId(uint256 _chainId) external { s.chainId = _chainId; } @@ -24,28 +24,44 @@ contract UtilsFacet is ZkSyncHyperchainBase { return s.bridgehub; } - function util_setBaseToken(address _baseToken) external { - s.baseToken = _baseToken; + function util_setBaseToken(bytes32 _baseTokenAssetId) external { + s.baseTokenAssetId = _baseTokenAssetId; } - function util_getBaseToken() external view returns (address) { - return s.baseToken; + function util_getBaseTokenAssetId() external view returns (bytes32) { + return s.baseTokenAssetId; } - function util_setBaseTokenBridge(address _baseTokenBridge) external { - s.baseTokenBridge = _baseTokenBridge; + function util_setDualVerifier(IVerifier _dualVerifier) external { + s.dualVerifier = _dualVerifier; } - function util_getBaseTokenBridge() external view returns (address) { - return s.baseTokenBridge; + function util_getDualVerifier() external view returns (IVerifier) { + return s.dualVerifier; } - function util_setVerifier(IVerifier _verifier) external { - s.verifier = _verifier; + function util_setPlonkVerifier(address _plonkVerifier) external { + s.plonkVerifier = _plonkVerifier; } - function util_getVerifier() external view returns (IVerifier) { - return s.verifier; + function util_getPlonkVerifier() external view returns (address) { + return s.plonkVerifier; + } + + function util_setFflonkVerifier(address _fflonkVerifier) external { + s.fflonkVerifier = _fflonkVerifier; + } + + function util_getFflonkVerifier() external view returns (address) { + return s.fflonkVerifier; + } + + function util_setFflonkProofLength(uint256 _fflonkProofLength) external { + s.fflonkProofLength = _fflonkProofLength; + } + + function util_getFflonkProofLength() external view returns (uint256) { + return s.fflonkProofLength; } function util_setStoredBatchHashes(uint32 _batchId, bytes32 _storedBatchHash) external { @@ -120,12 +136,12 @@ contract UtilsFacet is 
ZkSyncHyperchainBase { return s.zkPorterIsAvailable; } - function util_setStateTransitionManager(address _stateTransitionManager) external { - s.stateTransitionManager = _stateTransitionManager; + function util_setChainTypeManager(address _chainTypeManager) external { + s.chainTypeManager = _chainTypeManager; } - function util_getStateTransitionManager() external view returns (address) { - return s.stateTransitionManager; + function util_getChainTypeManager() external view returns (address) { + return s.chainTypeManager; } function util_setPriorityTxMaxGasLimit(uint256 _priorityTxMaxGasLimit) external { @@ -162,6 +178,18 @@ contract UtilsFacet is ZkSyncHyperchainBase { return s.isFrozen; } + function util_setTotalBatchesExecuted(uint256 _numberOfBatches) external { + s.totalBatchesExecuted = _numberOfBatches; + } + + function util_setL2LogsRootHash(uint256 _batchNumber, bytes32 _newHash) external { + s.l2LogsRootHashes[_batchNumber] = _newHash; + } + + function util_setBaseTokenGasPriceMultiplierNominator(uint128 _nominator) external { + s.baseTokenGasPriceMultiplierNominator = _nominator; + } + // add this to be excluded from coverage report function test() internal virtual {} } diff --git a/l1-contracts/test/foundry/unit/concrete/ValidatorTimelock/ValidatorTimelock.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/ValidatorTimelock/ValidatorTimelock.t.sol similarity index 56% rename from l1-contracts/test/foundry/unit/concrete/ValidatorTimelock/ValidatorTimelock.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/ValidatorTimelock/ValidatorTimelock.t.sol index dacc45160..3725f54e2 100644 --- a/l1-contracts/test/foundry/unit/concrete/ValidatorTimelock/ValidatorTimelock.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/ValidatorTimelock/ValidatorTimelock.t.sol @@ -4,8 +4,9 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; import {Utils} from "../Utils/Utils.sol"; import {ValidatorTimelock, IExecutor} from 
"contracts/state-transition/ValidatorTimelock.sol"; -import {DummyStateTransitionManagerForValidatorTimelock} from "contracts/dev-contracts/test/DummyStateTransitionManagerForValidatorTimelock.sol"; -import {IStateTransitionManager} from "contracts/state-transition/IStateTransitionManager.sol"; +import {DummyChainTypeManagerForValidatorTimelock} from "contracts/dev-contracts/test/DummyChainTypeManagerForValidatorTimelock.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {Unauthorized, TimeNotReached} from "contracts/common/L1ContractErrors.sol"; contract ValidatorTimelockTest is Test { /// @notice A new validator has been added. @@ -21,7 +22,7 @@ contract ValidatorTimelockTest is Test { error ValidatorDoesNotExist(uint256 _chainId); ValidatorTimelock validator; - DummyStateTransitionManagerForValidatorTimelock stateTransitionManager; + DummyChainTypeManagerForValidatorTimelock chainTypeManager; address owner; address zkSync; @@ -44,10 +45,10 @@ contract ValidatorTimelockTest is Test { lastBatchNumber = 123; executionDelay = 10; - stateTransitionManager = new DummyStateTransitionManagerForValidatorTimelock(owner, zkSync); + chainTypeManager = new DummyChainTypeManagerForValidatorTimelock(owner, zkSync); validator = new ValidatorTimelock(owner, executionDelay, eraChainId); vm.prank(owner); - validator.setStateTransitionManager(IStateTransitionManager(address(stateTransitionManager))); + validator.setChainTypeManager(IChainTypeManager(address(chainTypeManager))); vm.prank(owner); validator.addValidator(chainId, alice); vm.prank(owner); @@ -91,20 +92,24 @@ contract ValidatorTimelockTest is Test { batchesToCommit[0] = batchToCommit; vm.prank(alice); - validator.commitBatchesSharedBridge(chainId, storedBatch, batchesToCommit); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + storedBatch, + batchesToCommit + ); + validator.commitBatchesSharedBridge(chainId, 
commitBatchFrom, commitBatchTo, commitData); } - function test_setStateTransitionManager() public { - assert(validator.stateTransitionManager() == IStateTransitionManager(address(stateTransitionManager))); + function test_setChainTypeManager() public { + assert(validator.chainTypeManager() == IChainTypeManager(address(chainTypeManager))); - DummyStateTransitionManagerForValidatorTimelock newManager = new DummyStateTransitionManagerForValidatorTimelock( - bob, - zkSync - ); + DummyChainTypeManagerForValidatorTimelock newManager = new DummyChainTypeManagerForValidatorTimelock( + bob, + zkSync + ); vm.prank(owner); - validator.setStateTransitionManager(IStateTransitionManager(address(newManager))); + validator.setChainTypeManager(IChainTypeManager(address(newManager))); - assert(validator.stateTransitionManager() == IStateTransitionManager(address(newManager))); + assert(validator.chainTypeManager() == IChainTypeManager(address(newManager))); } function test_setExecutionDelay() public { @@ -125,7 +130,11 @@ contract ValidatorTimelockTest is Test { uint64 timestamp = 123456; vm.warp(timestamp); - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatches.selector), abi.encode(eraChainId)); + vm.mockCall( + zkSync, + abi.encodeWithSelector(IExecutor.commitBatchesSharedBridge.selector), + abi.encode(eraChainId) + ); IExecutor.StoredBatchInfo memory storedBatch = Utils.createStoredBatchInfo(); IExecutor.CommitBatchInfo memory batchToCommit = Utils.createCommitBatchInfo(); @@ -134,14 +143,18 @@ contract ValidatorTimelockTest is Test { IExecutor.CommitBatchInfo[] memory batchesToCommit = new IExecutor.CommitBatchInfo[](1); batchesToCommit[0] = batchToCommit; - vm.prank(dan); - validator.commitBatches(storedBatch, batchesToCommit); + vm.prank(alice); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + storedBatch, + batchesToCommit + ); + validator.commitBatchesSharedBridge(chainId, commitBatchFrom, 
commitBatchTo, commitData); - assert(validator.getCommittedBatchTimestamp(eraChainId, batchNumber) == timestamp); + assert(validator.getCommittedBatchTimestamp(chainId, batchNumber) == timestamp); } function test_commitBatches() public { - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatches.selector), abi.encode(chainId)); + vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatchesSharedBridge.selector), abi.encode(chainId)); IExecutor.StoredBatchInfo memory storedBatch = Utils.createStoredBatchInfo(); IExecutor.CommitBatchInfo memory batchToCommit = Utils.createCommitBatchInfo(); @@ -149,93 +162,48 @@ contract ValidatorTimelockTest is Test { IExecutor.CommitBatchInfo[] memory batchesToCommit = new IExecutor.CommitBatchInfo[](1); batchesToCommit[0] = batchToCommit; - vm.prank(dan); - validator.commitBatches(storedBatch, batchesToCommit); - } - - function test_revertBatches() public { - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.revertBatches.selector), abi.encode(lastBatchNumber)); - - vm.prank(dan); - validator.revertBatches(lastBatchNumber); + vm.prank(alice); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + storedBatch, + batchesToCommit + ); + validator.commitBatchesSharedBridge(chainId, commitBatchFrom, commitBatchTo, commitData); } function test_revertBatchesSharedBridge() public { - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.revertBatches.selector), abi.encode(chainId)); + vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.revertBatchesSharedBridge.selector), abi.encode(chainId)); vm.prank(alice); validator.revertBatchesSharedBridge(chainId, lastBatchNumber); } - function test_proveBatches() public { - IExecutor.StoredBatchInfo memory prevBatch = Utils.createStoredBatchInfo(); - IExecutor.StoredBatchInfo memory batchToProve = Utils.createStoredBatchInfo(); - IExecutor.ProofInput memory proof = Utils.createProofInput(); - - IExecutor.StoredBatchInfo[] 
memory batchesToProve = new IExecutor.StoredBatchInfo[](1); - batchesToProve[0] = batchToProve; - - vm.mockCall( - zkSync, - abi.encodeWithSelector(IExecutor.proveBatches.selector), - abi.encode(prevBatch, batchesToProve, proof) - ); - vm.prank(dan); - validator.proveBatches(prevBatch, batchesToProve, proof); - } - function test_proveBatchesSharedBridge() public { IExecutor.StoredBatchInfo memory prevBatch = Utils.createStoredBatchInfo(); IExecutor.StoredBatchInfo memory batchToProve = Utils.createStoredBatchInfo(); - IExecutor.ProofInput memory proof = Utils.createProofInput(); + uint256[] memory proof = new uint256[](0); IExecutor.StoredBatchInfo[] memory batchesToProve = new IExecutor.StoredBatchInfo[](1); batchesToProve[0] = batchToProve; vm.mockCall( zkSync, - abi.encodeWithSelector(IExecutor.proveBatches.selector), + abi.encodeWithSelector(IExecutor.proveBatchesSharedBridge.selector), abi.encode(chainId, prevBatch, batchesToProve, proof) ); vm.prank(alice); - validator.proveBatchesSharedBridge(chainId, prevBatch, batchesToProve, proof); - } - - function test_executeBatches() public { - uint64 timestamp = 123456; - uint64 batchNumber = 123; - // Commit batches first to have the valid timestamp - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatches.selector), abi.encode(chainId)); - - IExecutor.StoredBatchInfo memory storedBatch1 = Utils.createStoredBatchInfo(); - IExecutor.CommitBatchInfo memory batchToCommit = Utils.createCommitBatchInfo(); - - batchToCommit.batchNumber = batchNumber; - IExecutor.CommitBatchInfo[] memory batchesToCommit = new IExecutor.CommitBatchInfo[](1); - batchesToCommit[0] = batchToCommit; - - vm.prank(dan); - vm.warp(timestamp); - validator.commitBatches(storedBatch1, batchesToCommit); - - // Execute batches - IExecutor.StoredBatchInfo memory storedBatch2 = Utils.createStoredBatchInfo(); - storedBatch2.batchNumber = batchNumber; - IExecutor.StoredBatchInfo[] memory storedBatches = new IExecutor.StoredBatchInfo[](1); - 
storedBatches[0] = storedBatch2; - - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.proveBatches.selector), abi.encode(storedBatches)); - - vm.prank(dan); - vm.warp(timestamp + executionDelay + 1); - validator.executeBatches(storedBatches); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + prevBatch, + batchesToProve, + proof + ); + validator.proveBatchesSharedBridge(chainId, proveBatchFrom, proveBatchTo, proveData); } function test_executeBatchesSharedBridge() public { uint64 timestamp = 123456; uint64 batchNumber = 123; // Commit batches first to have the valid timestamp - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatches.selector), abi.encode(chainId)); + vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatchesSharedBridge.selector), abi.encode(chainId)); IExecutor.StoredBatchInfo memory storedBatch1 = Utils.createStoredBatchInfo(); IExecutor.CommitBatchInfo memory batchToCommit = Utils.createCommitBatchInfo(); @@ -246,7 +214,11 @@ contract ValidatorTimelockTest is Test { vm.prank(alice); vm.warp(timestamp); - validator.commitBatchesSharedBridge(chainId, storedBatch1, batchesToCommit); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + storedBatch1, + batchesToCommit + ); + validator.commitBatchesSharedBridge(chainId, commitBatchFrom, commitBatchTo, commitData); // Execute batches IExecutor.StoredBatchInfo memory storedBatch2 = Utils.createStoredBatchInfo(); @@ -254,11 +226,19 @@ contract ValidatorTimelockTest is Test { IExecutor.StoredBatchInfo[] memory storedBatches = new IExecutor.StoredBatchInfo[](1); storedBatches[0] = storedBatch2; - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.proveBatches.selector), abi.encode(storedBatches)); + vm.mockCall( + zkSync, + abi.encodeWithSelector(IExecutor.proveBatchesSharedBridge.selector), + abi.encode(storedBatches) + ); vm.prank(alice); vm.warp(timestamp + 
executionDelay + 1); - validator.executeBatchesSharedBridge(chainId, storedBatches); + (uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = Utils.encodeExecuteBatchesData( + storedBatches, + Utils.emptyData() + ); + validator.executeBatchesSharedBridge(chainId, executeBatchFrom, executeBatchTo, executeData); } function test_RevertWhen_setExecutionDelayNotOwner() public { @@ -270,7 +250,7 @@ contract ValidatorTimelockTest is Test { function test_RevertWhen_addValidatorNotAdmin() public { assert(validator.validators(chainId, bob) == false); - vm.expectRevert("ValidatorTimelock: only chain admin"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); validator.addValidator(chainId, bob); assert(validator.validators(chainId, bob) == false); @@ -279,7 +259,7 @@ contract ValidatorTimelockTest is Test { function test_RevertWhen_removeValidatorNotAdmin() public { assert(validator.validators(chainId, alice) == true); - vm.expectRevert("ValidatorTimelock: only chain admin"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); validator.removeValidator(chainId, alice); assert(validator.validators(chainId, alice) == true); @@ -309,59 +289,45 @@ contract ValidatorTimelockTest is Test { batchesToCommit[0] = batchToCommit; vm.prank(bob); - vm.expectRevert(bytes("ValidatorTimelock: only validator")); - validator.commitBatches(storedBatch, batchesToCommit); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, bob)); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + storedBatch, + batchesToCommit + ); + validator.commitBatchesSharedBridge(chainId, commitBatchFrom, commitBatchTo, commitData); } - function test_RevertWhen_setStateTransitionManagerNotOwner() public { + function test_RevertWhen_setChainTypeManagerNotOwner() public { vm.expectRevert("Ownable: caller is not the owner"); - 
validator.setStateTransitionManager(IStateTransitionManager(address(stateTransitionManager))); + validator.setChainTypeManager(IChainTypeManager(address(chainTypeManager))); } function test_RevertWhen_revertBatchesNotValidator() public { - vm.expectRevert("ValidatorTimelock: only validator"); - validator.revertBatches(lastBatchNumber); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); + validator.revertBatchesSharedBridge(uint256(0), lastBatchNumber); } function test_RevertWhen_revertBatchesSharedBridgeNotValidator() public { - vm.expectRevert("ValidatorTimelock: only validator"); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, address(this))); validator.revertBatchesSharedBridge(chainId, lastBatchNumber); } - function test_RevertWhen_proveBatchesNotValidator() public { - IExecutor.StoredBatchInfo memory prevBatch = Utils.createStoredBatchInfo(); - IExecutor.StoredBatchInfo memory batchToProve = Utils.createStoredBatchInfo(); - IExecutor.ProofInput memory proof = Utils.createProofInput(); - - IExecutor.StoredBatchInfo[] memory batchesToProve = new IExecutor.StoredBatchInfo[](1); - batchesToProve[0] = batchToProve; - - vm.expectRevert("ValidatorTimelock: only validator"); - validator.proveBatches(prevBatch, batchesToProve, proof); - } - function test_RevertWhen_proveBatchesSharedBridgeNotValidator() public { IExecutor.StoredBatchInfo memory prevBatch = Utils.createStoredBatchInfo(); IExecutor.StoredBatchInfo memory batchToProve = Utils.createStoredBatchInfo(); - IExecutor.ProofInput memory proof = Utils.createProofInput(); + uint256[] memory proof = new uint256[](0); IExecutor.StoredBatchInfo[] memory batchesToProve = new IExecutor.StoredBatchInfo[](1); batchesToProve[0] = batchToProve; vm.prank(bob); - vm.expectRevert("ValidatorTimelock: only validator"); - validator.proveBatchesSharedBridge(chainId, prevBatch, batchesToProve, proof); - } - - function test_RevertWhen_executeBatchesNotValidator() public { - 
IExecutor.StoredBatchInfo memory storedBatch = Utils.createStoredBatchInfo(); - - IExecutor.StoredBatchInfo[] memory storedBatches = new IExecutor.StoredBatchInfo[](1); - storedBatches[0] = storedBatch; - - vm.prank(bob); - vm.expectRevert("ValidatorTimelock: only validator"); - validator.executeBatches(storedBatches); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, bob)); + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + prevBatch, + batchesToProve, + proof + ); + validator.proveBatchesSharedBridge(chainId, proveBatchFrom, proveBatchTo, proveData); } function test_RevertWhen_executeBatchesSharedBridgeNotValidator() public { @@ -371,44 +337,19 @@ contract ValidatorTimelockTest is Test { storedBatches[0] = storedBatch; vm.prank(bob); - vm.expectRevert("ValidatorTimelock: only validator"); - validator.executeBatchesSharedBridge(chainId, storedBatches); - } - - function test_RevertWhen_executeBatchesTooEarly() public { - uint64 timestamp = 123456; - uint64 batchNumber = 123; - // Prove batches first to have the valid timestamp - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatches.selector), abi.encode(chainId)); - - IExecutor.StoredBatchInfo memory storedBatch1 = Utils.createStoredBatchInfo(); - IExecutor.CommitBatchInfo memory batchToCommit = Utils.createCommitBatchInfo(); - - batchToCommit.batchNumber = batchNumber; - IExecutor.CommitBatchInfo[] memory batchesToCommit = new IExecutor.CommitBatchInfo[](1); - batchesToCommit[0] = batchToCommit; - - vm.prank(dan); - vm.warp(timestamp); - validator.commitBatches(storedBatch1, batchesToCommit); - - // Execute batches - IExecutor.StoredBatchInfo memory storedBatch2 = Utils.createStoredBatchInfo(); - storedBatch2.batchNumber = batchNumber; - IExecutor.StoredBatchInfo[] memory storedBatches = new IExecutor.StoredBatchInfo[](1); - storedBatches[0] = storedBatch2; - - vm.prank(dan); - vm.warp(timestamp + executionDelay - 1); - 
vm.expectRevert(bytes("5c")); - validator.executeBatches(storedBatches); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, bob)); + (uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = Utils.encodeExecuteBatchesData( + storedBatches, + Utils.emptyData() + ); + validator.executeBatchesSharedBridge(chainId, executeBatchFrom, executeBatchTo, executeData); } function test_RevertWhen_executeBatchesSharedBridgeTooEarly() public { uint64 timestamp = 123456; uint64 batchNumber = 123; // Prove batches first to have the valid timestamp - vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatches.selector), abi.encode(chainId)); + vm.mockCall(zkSync, abi.encodeWithSelector(IExecutor.commitBatchesSharedBridge.selector), abi.encode(chainId)); IExecutor.StoredBatchInfo memory storedBatch1 = Utils.createStoredBatchInfo(); IExecutor.CommitBatchInfo memory batchToCommit = Utils.createCommitBatchInfo(); @@ -419,7 +360,11 @@ contract ValidatorTimelockTest is Test { vm.prank(alice); vm.warp(timestamp); - validator.commitBatchesSharedBridge(chainId, storedBatch1, batchesToCommit); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + storedBatch1, + batchesToCommit + ); + validator.commitBatchesSharedBridge(chainId, commitBatchFrom, commitBatchTo, commitData); // Execute batches IExecutor.StoredBatchInfo memory storedBatch2 = Utils.createStoredBatchInfo(); @@ -429,7 +374,13 @@ contract ValidatorTimelockTest is Test { vm.prank(alice); vm.warp(timestamp + executionDelay - 1); - vm.expectRevert(bytes("5c")); - validator.executeBatchesSharedBridge(chainId, storedBatches); + vm.expectRevert( + abi.encodeWithSelector(TimeNotReached.selector, timestamp + executionDelay, timestamp + executionDelay - 1) + ); + (uint256 executeBatchFrom, uint256 executeBatchTo, bytes memory executeData) = Utils.encodeExecuteBatchesData( + storedBatches, + Utils.emptyData() + ); + 
validator.executeBatchesSharedBridge(chainId, executeBatchFrom, executeBatchTo, executeData); } } diff --git a/l1-contracts/test/foundry/unit/concrete/Verifier/Verifier.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Verifier/Verifier.t.sol similarity index 90% rename from l1-contracts/test/foundry/unit/concrete/Verifier/Verifier.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/Verifier/Verifier.t.sol index 54ab49974..bd67cfa2b 100644 --- a/l1-contracts/test/foundry/unit/concrete/Verifier/Verifier.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/Verifier/Verifier.t.sol @@ -12,7 +12,6 @@ contract VerifierTestTest is Test { uint256[] public publicInputs; uint256[] public serializedProof; - uint256[] public recursiveAggregationInput; Verifier public verifier; @@ -68,7 +67,7 @@ contract VerifierTestTest is Test { } function testShouldVerify() public view { - bool success = verifier.verify(publicInputs, serializedProof, recursiveAggregationInput); + bool success = verifier.verify(publicInputs, serializedProof); assert(success); } @@ -76,7 +75,7 @@ contract VerifierTestTest is Test { uint256[] memory newPublicInputs = publicInputs; newPublicInputs[0] += uint256(bytes32(0xe000000000000000000000000000000000000000000000000000000000000000)); - bool success = verifier.verify(newPublicInputs, serializedProof, recursiveAggregationInput); + bool success = verifier.verify(newPublicInputs, serializedProof); assert(success); } @@ -86,7 +85,7 @@ contract VerifierTestTest is Test { newSerializedProof[1] += Q_MOD; newSerializedProof[1] += Q_MOD; - bool success = verifier.verify(publicInputs, newSerializedProof, recursiveAggregationInput); + bool success = verifier.verify(publicInputs, newSerializedProof); assert(success); } @@ -94,7 +93,7 @@ contract VerifierTestTest is Test { uint256[] memory newSerializedProof = serializedProof; newSerializedProof[22] += R_MOD; - bool success = verifier.verify(publicInputs, newSerializedProof, recursiveAggregationInput); + bool 
success = verifier.verify(publicInputs, newSerializedProof); assert(success); } @@ -104,14 +103,14 @@ contract VerifierTestTest is Test { newPublicInputs[1] = publicInputs[0]; vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(newPublicInputs, serializedProof, recursiveAggregationInput); + verifier.verify(newPublicInputs, serializedProof); } function testEmptyPublicInput_shouldRevert() public { uint256[] memory newPublicInputs; vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(newPublicInputs, serializedProof, recursiveAggregationInput); + verifier.verify(newPublicInputs, serializedProof); } function testMoreThan44WordsProof_shouldRevert() public { @@ -123,21 +122,25 @@ contract VerifierTestTest is Test { newSerializedProof[newSerializedProof.length - 1] = serializedProof[serializedProof.length - 1]; vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(publicInputs, newSerializedProof, recursiveAggregationInput); + verifier.verify(publicInputs, newSerializedProof); } function testEmptyProof_shouldRevert() public { uint256[] memory newSerializedProof; vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(publicInputs, newSerializedProof, recursiveAggregationInput); + verifier.verify(publicInputs, newSerializedProof); } - function testNotEmptyRecursiveAggregationInput_shouldRevert() public { - uint256[] memory newRecursiveAggregationInput = publicInputs; + function testLongerProofInput_shouldRevert() public { + uint256[] memory newSerializedProof = new uint256[](serializedProof.length + 1); + for (uint256 i = 0; i < serializedProof.length; i++) { + newSerializedProof[i] = serializedProof[i]; + } + newSerializedProof[newSerializedProof.length - 1] = publicInputs[0]; vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(publicInputs, serializedProof, newRecursiveAggregationInput); + verifier.verify(publicInputs, newSerializedProof); } function 
testEllipticCurvePointAtInfinity_shouldRevert() public { @@ -146,7 +149,7 @@ contract VerifierTestTest is Test { newSerializedProof[1] = 0; vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(publicInputs, newSerializedProof, recursiveAggregationInput); + verifier.verify(publicInputs, newSerializedProof); } function testInvalidPublicInput_shouldRevert() public { @@ -154,7 +157,7 @@ contract VerifierTestTest is Test { newPublicInputs[0] = 0; vm.expectRevert(bytes("invalid quotient evaluation")); - verifier.verify(newPublicInputs, serializedProof, recursiveAggregationInput); + verifier.verify(newPublicInputs, serializedProof); } function testVerificationKeyHash() public virtual { diff --git a/l1-contracts/test/foundry/l1/unit/concrete/Verifier/VerifierRecursive.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/Verifier/VerifierRecursive.t.sol new file mode 100644 index 000000000..c23759f35 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/Verifier/VerifierRecursive.t.sol @@ -0,0 +1,56 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {VerifierTestTest} from "./Verifier.t.sol"; +import {VerifierRecursiveTest} from "contracts/dev-contracts/test/VerifierRecursiveTest.sol"; + +contract VerifierRecursiveTestTest is VerifierTestTest { + function setUp() public override { + super.setUp(); + + serializedProof.push(2257920826825449939414463854743099397427742128922725774525544832270890253504); + serializedProof.push(9091218701914748532331969127001446391756173432977615061129552313204917562530); + serializedProof.push(16188304989094043810949359833767911976672882599560690320245309499206765021563); + serializedProof.push(3201093556796962656759050531176732990872300033146738631772984017549903765305); + + verifier = new VerifierRecursiveTest(); + } + + function testMoreThan4WordsRecursiveInput_shouldRevert() public { + uint256[] memory newSerializedProof = new uint256[](serializedProof.length + 1); + + for (uint256 i = 0; i < 
serializedProof.length; i++) { + newSerializedProof[i] = serializedProof[i]; + } + newSerializedProof[newSerializedProof.length - 1] = serializedProof[serializedProof.length - 1]; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testEmptyRecursiveInput_shouldRevert() public { + uint256[] memory newSerializedProof = new uint256[](serializedProof.length - 4); + for (uint256 i = 0; i < newSerializedProof.length; i++) { + newSerializedProof[i] = serializedProof[i]; + } + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testInvalidRecursiveInput_shouldRevert() public { + uint256[] memory newSerializedProof = serializedProof; + newSerializedProof[newSerializedProof.length - 4] = 1; + newSerializedProof[newSerializedProof.length - 3] = 2; + newSerializedProof[newSerializedProof.length - 2] = 1; + newSerializedProof[newSerializedProof.length - 1] = 2; + + vm.expectRevert(bytes("finalPairing: pairing failure")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testVerificationKeyHash() public override { + bytes32 verificationKeyHash = verifier.verificationKeyHash(); + assertEq(verificationKeyHash, 0x88b3ddc4ed85974c7e14297dcad4097169440305c05fdb6441ca8dfd77cd7fa7); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/PushNewLeaf.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/PushNewLeaf.t.sol new file mode 100644 index 000000000..dc08fde8a --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/PushNewLeaf.t.sol @@ -0,0 +1,82 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import {FullMerkleTest} from "./_FullMerkle_Shared.t.sol"; + +contract PushNewLeafTest is FullMerkleTest { + function test_oneLeaf() public { + // Inserting one leaf + bytes32 leaf0 = keccak256("Leaf 0"); + 
merkleTest.pushNewLeaf(leaf0); + + // Checking the tree structure + assertEq(merkleTest.height(), 0, "Height should be 0 after one insert"); + assertEq(merkleTest.index(), 1, "Leaf number should be 1 after one insert"); + + // Checking leaf node + assertEq(merkleTest.node(0, 0), leaf0, "Node 0,0 should be correctly inserted"); + + // Chekcking zeros tree structure + assertEq(merkleTest.zeros(0), zeroHash, "Zero 0 should be correctly inserted"); + } + + function test_twoLeaves() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + + // Checking the tree structure + assertEq(merkleTest.height(), 1, "Height should be 1 after two inserts"); + assertEq(merkleTest.index(), 2, "Leaf number should be 2 after two inserts"); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), leaf0, "Node 0,0 should be correctly inserted"); + assertEq(merkleTest.node(0, 1), leaf1, "Node 0,1 should be correctly inserted"); + + // Checking parent node + bytes32 l01Hashed = keccak(leaf0, leaf1); + assertEq(merkleTest.node(1, 0), l01Hashed, "Node 1,0 should be correctly inserted"); + + // Checking zeros + bytes32 zeroHashed = keccak(zeroHash, zeroHash); + assertEq(merkleTest.zeros(1), zeroHashed, "Zero 1 should be correctly inserted"); + } + + function test_threeLeaves() public { + // Insert three leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + bytes32 leaf2 = keccak256("Leaf 2"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + merkleTest.pushNewLeaf(leaf2); + + // Checking the tree structure + assertEq(merkleTest.height(), 2, "Height should be 2 after three inserts"); + assertEq(merkleTest.index(), 3, "Leaf number should be 3 after three inserts"); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), leaf0, "Node 0,0 should be correctly inserted"); + assertEq(merkleTest.node(0, 1), leaf1, "Node 
0,1 should be correctly inserted"); + assertEq(merkleTest.node(0, 2), leaf2, "Node 0,2 should be correctly inserted"); + + // Checking parent nodes + bytes32 l01Hashed = keccak(leaf0, leaf1); + assertEq(merkleTest.node(1, 0), l01Hashed, "Node 1,0 should be correctly inserted"); + // there is no leaf3 so we hash leaf2 with zero + bytes32 l23Hashed = keccak(leaf2, merkleTest.zeros(0)); + assertEq(merkleTest.node(1, 1), l23Hashed, "Node 1,1 should be correctly inserted"); + + // Checking root node + bytes32 l01l23Hashed = keccak(l01Hashed, l23Hashed); + assertEq(merkleTest.node(2, 0), l01l23Hashed, "Node 2,0 should be correctly inserted"); + + // Checking zero + bytes32 zeroHashed = keccak(zeroHash, zeroHash); + assertEq(merkleTest.zeros(1), zeroHashed, "Zero 1 should be correctly inserted"); + bytes32 zhHashed = keccak(zeroHashed, zeroHashed); + assertEq(merkleTest.zeros(2), zhHashed, "Zero 2 should be correctly inserted"); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/Root.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/Root.t.sol new file mode 100644 index 000000000..3ae519259 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/Root.t.sol @@ -0,0 +1,56 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import {FullMerkleTest} from "./_FullMerkle_Shared.t.sol"; + +contract RootTest is FullMerkleTest { + function test_emptyTree() public view { + // Initially tree is empty, root is the zero hash + assertEq(merkleTest.root(), zeroHash, "Root should be zero hash initially"); + } + + function test_oneLeaf() public { + // Inserting one leaf + bytes32 leaf = keccak256("Leaf 0"); + merkleTest.pushNewLeaf(leaf); + + // With one leaf, root is the leaf itself + assertEq(merkleTest.root(), leaf, "Root should be the leaf hash"); + } + + function test_twoLeaves() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = 
keccak256("Leaf 1"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + + // Calculate expected root + bytes32 expectedRoot = keccak(leaf0, leaf1); + assertEq(merkleTest.root(), expectedRoot, "Root should be the hash of the two leaves"); + } + + function test_nodeCountAndRoot() public { + // Initially tree is empty + assertEq(merkleTest.nodeCount(0), 1, "Initial node count at height 0 should be 1"); + + // Inserting three leaves and checking counts and root + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + bytes32 leaf2 = keccak256("Leaf 2"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + merkleTest.pushNewLeaf(leaf2); + + assertEq(merkleTest.nodeCount(0), 3, "Node count at height 0 should be 3 after three inserts"); + assertEq(merkleTest.nodeCount(1), 2, "Node count at height 1 should be 2"); + assertEq(merkleTest.nodeCount(2), 1, "Node count at height 2 should be 1"); + + // Calculate expected root to verify correctness + bytes32 leftChild = keccak(leaf0, leaf1); + bytes32 rightChild = keccak(leaf2, merkleTest.zeros(0)); + bytes32 expectedRoot = keccak(leftChild, rightChild); + + assertEq(merkleTest.root(), expectedRoot, "Root should match expected value after inserts"); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/Setup.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/Setup.t.sol new file mode 100644 index 000000000..6b6af0bc4 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/Setup.t.sol @@ -0,0 +1,12 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import {FullMerkleTest} from "./_FullMerkle_Shared.t.sol"; + +contract SetupTest is FullMerkleTest { + function test_checkInit() public view { + assertEq(merkleTest.height(), 0, "Height should be 0"); + assertEq(merkleTest.index(), 0, "Leaf number should be 0"); + assertEq(merkleTest.zeros(0), zeroHash, "Zero hash 
should be correctly initialized"); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateAllLeaves.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateAllLeaves.t.sol new file mode 100644 index 000000000..5bb127685 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateAllLeaves.t.sol @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import {FullMerkleTest} from "./_FullMerkle_Shared.t.sol"; + +contract UpdateAllLeavesTest is FullMerkleTest { + function test_revertWhen_wrongLength() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + + // Preparing new leaves for full update + bytes32[] memory newLeaves = new bytes32[](3); + newLeaves[0] = keccak256("New Leaf 0"); + newLeaves[1] = keccak256("New Leaf 1"); + newLeaves[2] = keccak256("New Leaf 2"); + + // Updating all leaves with wrong length + vm.expectRevert(bytes("FMT, wrong length")); + merkleTest.updateAllLeaves(newLeaves); + } + + function test_oneLeaf() public { + // Inserting one leaf + bytes32 leaf0 = keccak256("Leaf 0"); + merkleTest.pushNewLeaf(leaf0); + + // Preparing new leaves for full update + bytes32[] memory newLeaves = new bytes32[](1); + newLeaves[0] = keccak256("New Leaf 0"); + + // Updating all leaves + merkleTest.updateAllLeaves(newLeaves); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), newLeaves[0], "Node 0,0 should be correctly updated"); + } + + function test_twoLeaves() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + + // Preparing new leaves for full update + bytes32[] memory newLeaves = new bytes32[](2); + newLeaves[0] = keccak256("New Leaf 0"); + newLeaves[1] = 
keccak256("New Leaf 1"); + + // Updating all leaves + merkleTest.updateAllLeaves(newLeaves); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), newLeaves[0], "Node 0,0 should be correctly updated"); + assertEq(merkleTest.node(0, 1), newLeaves[1], "Node 0,1 should be correctly updated"); + + // Checking parent node + bytes32 l01Hashed = keccak(newLeaves[0], newLeaves[1]); + assertEq(merkleTest.node(1, 0), l01Hashed, "Node 1,0 should be correctly updated"); + } + + function test_threeLeaves() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + bytes32 leaf2 = keccak256("Leaf 2"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + merkleTest.pushNewLeaf(leaf2); + + // Preparing new leaves for full update + bytes32[] memory newLeaves = new bytes32[](3); + newLeaves[0] = keccak256("New Leaf 0"); + newLeaves[1] = keccak256("New Leaf 1"); + newLeaves[2] = keccak256("New Leaf 2"); + + // Updating all leaves + merkleTest.updateAllLeaves(newLeaves); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), newLeaves[0], "Node 0,0 should be correctly updated"); + assertEq(merkleTest.node(0, 1), newLeaves[1], "Node 0,1 should be correctly updated"); + assertEq(merkleTest.node(0, 2), newLeaves[2], "Node 0,2 should be correctly updated"); + + // Checking parent nodes + bytes32 l01Hashed = keccak(newLeaves[0], newLeaves[1]); + assertEq(merkleTest.node(1, 0), l01Hashed, "Node 1,0 should be correctly updated"); + // There is no leaf3 so we hash leaf2 with zero + bytes32 l23Hashed = keccak(newLeaves[2], merkleTest.zeros(0)); + assertEq(merkleTest.node(1, 1), l23Hashed, "Node 1,1 should be correctly updated"); + + // Checking root node + bytes32 l01l23Hashed = keccak(l01Hashed, l23Hashed); + assertEq(merkleTest.node(2, 0), l01l23Hashed, "Node 2,0 should be correctly updated"); + } +} diff --git 
a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateAllNodesAtHeight.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateAllNodesAtHeight.t.sol new file mode 100644 index 000000000..be93cd032 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateAllNodesAtHeight.t.sol @@ -0,0 +1,82 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import {FullMerkleTest} from "./_FullMerkle_Shared.t.sol"; + +contract UpdateAllNodesAtHeightTest is FullMerkleTest { + function test_height0() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + + // Preparing new leaves for full update + bytes32[] memory newLeaves = new bytes32[](2); + newLeaves[0] = keccak256("New Leaf 0"); + newLeaves[1] = keccak256("New Leaf 1"); + + // Updating all nodes at height 0 + merkleTest.updateAllNodesAtHeight(0, newLeaves); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), newLeaves[0], "Node 0,0 should be correctly updated"); + assertEq(merkleTest.node(0, 1), newLeaves[1], "Node 0,1 should be correctly updated"); + + // Checking parent node + bytes32 l01Hashed = keccak(newLeaves[0], newLeaves[1]); + assertEq(merkleTest.node(1, 0), l01Hashed, "Node 1,0 should be correctly updated"); + } + + function test_height1() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + bytes32 leaf2 = keccak256("Leaf 2"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + merkleTest.pushNewLeaf(leaf2); + + // Preparing new leaves for full update + bytes32[] memory newLeaves = new bytes32[](2); + newLeaves[0] = keccak256("New Leaf 0"); + newLeaves[1] = keccak256("New Leaf 1"); + + // Updating all nodes at height 1 + merkleTest.updateAllNodesAtHeight(1, newLeaves); + + // Checking leaf nodes + 
assertEq(merkleTest.node(0, 0), leaf0, "Node 0,0 should be correctly inserted"); + assertEq(merkleTest.node(0, 1), leaf1, "Node 0,1 should be correctly inserted"); + assertEq(merkleTest.node(0, 2), leaf2, "Node 0,2 should be correctly inserted"); + + // Checking parent nodes + assertEq(merkleTest.node(1, 0), newLeaves[0], "Node 1,0 should be correctly updated"); + assertEq(merkleTest.node(1, 1), newLeaves[1], "Node 1,1 should be correctly updated"); + } + + function test_height2() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + bytes32 leaf2 = keccak256("Leaf 2"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + merkleTest.pushNewLeaf(leaf2); + + // Preparing new leaves for full update + bytes32[] memory newLeaves = new bytes32[](1); + newLeaves[0] = keccak256("New Leaf 0"); + + // Updating all nodes at height 2 + merkleTest.updateAllNodesAtHeight(2, newLeaves); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), leaf0, "Node 0,0 should be correctly inserted"); + assertEq(merkleTest.node(0, 1), leaf1, "Node 0,1 should be correctly inserted"); + + // Checking parent node + assertEq(merkleTest.node(1, 0), keccak(leaf0, leaf1), "Node 1,0 should be correctly inserted"); + assertEq(merkleTest.node(2, 0), newLeaves[0], "Node 2,0 should be correctly updated"); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateLeaf.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateLeaf.t.sol new file mode 100644 index 000000000..ae29641b8 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/UpdateLeaf.t.sol @@ -0,0 +1,41 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import {FullMerkleTest} from "./_FullMerkle_Shared.t.sol"; + +contract UpdateLeafTest is FullMerkleTest { + function test_revertWhen_wrongIndex() public { + // Inserting two leaves + bytes32 leaf0 
= keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + + // Preparing new leaf 1 + bytes32 newLeaf1 = keccak256("New Leaf 1"); + + // Updating leaf 1 with wrong index + vm.expectRevert(bytes("FMT, wrong index")); + merkleTest.updateLeaf(2, newLeaf1); + } + + function test_updateLeaf() public { + // Inserting two leaves + bytes32 leaf0 = keccak256("Leaf 0"); + bytes32 leaf1 = keccak256("Leaf 1"); + merkleTest.pushNewLeaf(leaf0); + merkleTest.pushNewLeaf(leaf1); + + // Updating leaf 1 + bytes32 newLeaf1 = keccak256("New Leaf 1"); + merkleTest.updateLeaf(1, newLeaf1); + + // Checking leaf nodes + assertEq(merkleTest.node(0, 0), leaf0, "Node 0,0 should be correctly inserted"); + assertEq(merkleTest.node(0, 1), newLeaf1, "Node 0,1 should be correctly inserted"); + + // Checking parent node + bytes32 l01Hashed = keccak(leaf0, newLeaf1); + assertEq(merkleTest.node(1, 0), l01Hashed, "Node 1,0 should be correctly inserted"); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/_FullMerkle_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/_FullMerkle_Shared.t.sol new file mode 100644 index 000000000..29c271edd --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/FullMerkle/_FullMerkle_Shared.t.sol @@ -0,0 +1,23 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import {Test} from "forge-std/Test.sol"; + +import {FullMerkleTest as FullMerkleTestContract} from "contracts/dev-contracts/test/FullMerkleTest.sol"; + +contract FullMerkleTest is Test { + // add this to be excluded from coverage report + function test() internal {} + + FullMerkleTestContract internal merkleTest; + bytes32 constant zeroHash = keccak256(abi.encodePacked("ZERO")); + + function setUp() public { + merkleTest = new FullMerkleTestContract(zeroHash); + } + + // ### Helper functions ### + function keccak(bytes32 left, 
bytes32 right) internal pure returns (bytes32) { + return keccak256(abi.encodePacked(left, right)); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/IncrementalMerkle/IncrementalMerkle.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/IncrementalMerkle/IncrementalMerkle.t.sol new file mode 100644 index 000000000..bb7fe7090 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/IncrementalMerkle/IncrementalMerkle.t.sol @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {IncrementalMerkleTest} from "contracts/dev-contracts/test/IncrementalMerkleTest.sol"; + +contract IncrementalMerkleTestTest is Test { + IncrementalMerkleTest merkleTest; + bytes32[] elements; + bytes32 root; + bytes32 zero = "0x1234567"; + + function setUp() public { + merkleTest = new IncrementalMerkleTest(zero); + } + + function testCheckSetup() public { + assertEq(merkleTest.height(), 0); + assertEq(merkleTest.index(), 0); + } + + function testSingleElement() public { + addMoreElements(1); + + assertEq(merkleTest.root(), bytes32(abi.encodePacked(uint256(0)))); + assertEq(merkleTest.height(), 0); + assertEq(merkleTest.index(), 1); + } + + function testTwoElements() public { + addMoreElements(2); + + assertEq(merkleTest.root(), keccak256(abi.encodePacked(uint256(0), uint256(1)))); + assertEq(merkleTest.index(), 2); + assertEq(merkleTest.height(), 1); + } + + function testPrepare3Elements() public { + merkleTest.push(bytes32(uint256(2))); + merkleTest.push(bytes32(uint256(zero))); + assertEq(merkleTest.index(), 2); + assertEq(merkleTest.height(), 1); + assertEq(merkleTest.zeros(0), zero); + + assertEq(merkleTest.root(), keccak256(abi.encodePacked(uint256(2), uint256(zero)))); + } + + function testThreeElements() public { + addMoreElements(3); + + assertEq(merkleTest.index(), 3); + assertEq(merkleTest.height(), 2); + assertEq(merkleTest.zeros(0), zero); 
+ assertEq(merkleTest.zeros(1), keccak256(abi.encodePacked(uint256(zero), uint256(zero)))); + assertEq(merkleTest.zeros(2), keccak256(abi.encodePacked(merkleTest.zeros(1), merkleTest.zeros(1)))); + assertEq(merkleTest.side(0), bytes32((uint256(2)))); + assertEq(merkleTest.side(1), keccak256(abi.encodePacked(uint256(0), uint256(1)))); + assertEq( + merkleTest.root(), + keccak256( + abi.encodePacked( + keccak256(abi.encodePacked(uint256(0), uint256(1))), + keccak256(abi.encodePacked(uint256(2), uint256(zero))) + ) + ) + ); + } + + function addMoreElements(uint256 n) public { + for (uint256 i = 0; i < n; i++) { + elements.push(bytes32(abi.encodePacked(i))); + merkleTest.push(elements[i]); + } + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/Merkle/Merkle.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/Merkle/Merkle.t.sol new file mode 100644 index 000000000..88e8c8efa --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/Merkle/Merkle.t.sol @@ -0,0 +1,141 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {MerkleTest} from "contracts/dev-contracts/test/MerkleTest.sol"; +import {MerkleTreeNoSort} from "./MerkleTreeNoSort.sol"; +import {MerklePathEmpty, MerkleIndexOutOfBounds, MerklePathOutOfBounds} from "contracts/common/L1ContractErrors.sol"; + +contract MerkleTestTest is Test { + MerkleTreeNoSort merkleTree; + MerkleTreeNoSort smallMerkleTree; + MerkleTest merkleTest; + bytes32[] elements; + bytes32 root; + + function setUp() public { + merkleTree = new MerkleTreeNoSort(); + smallMerkleTree = new MerkleTreeNoSort(); + merkleTest = new MerkleTest(); + + for (uint256 i = 0; i < 65; i++) { + elements.push(keccak256(abi.encodePacked(i))); + } + + root = merkleTree.getRoot(elements); + } + + function testElements(uint256 i) public { + vm.assume(i < elements.length); + bytes32 leaf = elements[i]; + bytes32[] memory proof = 
merkleTree.getProof(elements, i); + + bytes32 rootFromContract = merkleTest.calculateRoot(proof, i, leaf); + + assertEq(rootFromContract, root); + } + + function prepareRangeProof( + uint256 start, + uint256 end + ) public returns (bytes32[] memory, bytes32[] memory, bytes32[] memory) { + bytes32[] memory left = merkleTree.getProof(elements, start); + bytes32[] memory right = merkleTree.getProof(elements, end); + bytes32[] memory leaves = new bytes32[](end - start + 1); + for (uint256 i = start; i <= end; ++i) { + leaves[i - start] = elements[i]; + } + + return (left, right, leaves); + } + + function testFirstElement() public { + testElements(0); + } + + function testLastElement() public { + testElements(elements.length - 1); + } + + function testEmptyProof_shouldRevert() public { + bytes32 leaf = elements[0]; + bytes32[] memory proof; + + vm.expectRevert(MerklePathEmpty.selector); + merkleTest.calculateRoot(proof, 0, leaf); + } + + function testLeafIndexTooBig_shouldRevert() public { + bytes32 leaf = elements[0]; + bytes32[] memory proof = merkleTree.getProof(elements, 0); + + vm.expectRevert(MerkleIndexOutOfBounds.selector); + merkleTest.calculateRoot(proof, 2 ** 255, leaf); + } + + function testProofLengthTooLarge_shouldRevert() public { + bytes32 leaf = elements[0]; + bytes32[] memory proof = new bytes32[](256); + + vm.expectRevert(MerklePathOutOfBounds.selector); + merkleTest.calculateRoot(proof, 0, leaf); + } + + function testRangeProof() public { + (bytes32[] memory left, bytes32[] memory right, bytes32[] memory leaves) = prepareRangeProof(10, 13); + bytes32 rootFromContract = merkleTest.calculateRoot(left, right, 10, leaves); + assertEq(rootFromContract, root); + } + + function testRangeProofIncorrect() public { + (bytes32[] memory left, bytes32[] memory right, bytes32[] memory leaves) = prepareRangeProof(10, 13); + bytes32 rootFromContract = merkleTest.calculateRoot(left, right, 9, leaves); + assertNotEq(rootFromContract, root); + } + + function 
testRangeProofLengthMismatch_shouldRevert() public { + (, bytes32[] memory right, bytes32[] memory leaves) = prepareRangeProof(10, 13); + bytes32[] memory leftShortened = new bytes32[](right.length - 1); + + vm.expectRevert(bytes("Merkle: path length mismatch")); + merkleTest.calculateRoot(leftShortened, right, 10, leaves); + } + + function testRangeProofEmptyPaths_shouldRevert() public { + (, , bytes32[] memory leaves) = prepareRangeProof(10, 13); + bytes32[] memory left; + bytes32[] memory right; + + vm.expectRevert(MerklePathEmpty.selector); + merkleTest.calculateRoot(left, right, 10, leaves); + } + + function testRangeProofWrongIndex_shouldRevert() public { + (bytes32[] memory left, bytes32[] memory right, bytes32[] memory leaves) = prepareRangeProof(10, 13); + vm.expectRevert(bytes("Merkle: index/height mismatch")); + merkleTest.calculateRoot(left, right, 128, leaves); + } + + function testRangeProofSingleLeaf() public { + (bytes32[] memory left, bytes32[] memory right, bytes32[] memory leaves) = prepareRangeProof(10, 10); + bytes32 rootFromContract = merkleTest.calculateRoot(left, right, 10, leaves); + assertEq(rootFromContract, root); + } + + function testRangeProofEmpty_shouldRevert() public { + bytes32[] memory left = merkleTree.getProof(elements, 10); + bytes32[] memory right = merkleTree.getProof(elements, 10); + bytes32[] memory leaves; + vm.expectRevert(bytes("Merkle: nothing to prove")); + merkleTest.calculateRoot(left, right, 10, leaves); + } + + function testRangeProofSingleElementTree() public { + bytes32[] memory leaves = new bytes32[](1); + leaves[0] = elements[10]; + bytes32[] memory left = new bytes32[](0); + bytes32[] memory right = new bytes32[](0); + bytes32 rootFromContract = merkleTest.calculateRoot(left, right, 0, leaves); + assertEq(rootFromContract, leaves[0]); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/Merkle/MerkleTreeNoSort.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/Merkle/MerkleTreeNoSort.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/libraries/Merkle/MerkleTreeNoSort.sol rename to l1-contracts/test/foundry/l1/unit/concrete/common/libraries/Merkle/MerkleTreeNoSort.sol diff --git a/l1-contracts/test/foundry/unit/concrete/common/libraries/UncheckedMath/UncheckedAdd.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UncheckedMath/UncheckedAdd.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/common/libraries/UncheckedMath/UncheckedAdd.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UncheckedMath/UncheckedAdd.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/common/libraries/UncheckedMath/UncheckedInc.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UncheckedMath/UncheckedInc.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/common/libraries/UncheckedMath/UncheckedInc.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UncheckedMath/UncheckedInc.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/common/libraries/UncheckedMath/_UncheckedMath_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UncheckedMath/_UncheckedMath_Shared.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/common/libraries/UncheckedMath/_UncheckedMath_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UncheckedMath/_UncheckedMath_Shared.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/common/libraries/UnsafeBytes/UnsafeBytes.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UnsafeBytes/UnsafeBytes.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/common/libraries/UnsafeBytes/UnsafeBytes.t.sol rename to 
l1-contracts/test/foundry/l1/unit/concrete/common/libraries/UnsafeBytes/UnsafeBytes.t.sol diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/Admin.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/Admin.t.sol new file mode 100644 index 000000000..5194b1da5 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/Admin.t.sol @@ -0,0 +1,38 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; + +contract AdminTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + + function test_setPendingAdmin() public { + address newAdmin = makeAddr("newAdmin"); + + vm.expectEmit(true, true, true, false); + emit IChainTypeManager.NewPendingAdmin(address(0), newAdmin); + chainContractAddress.setPendingAdmin(newAdmin); + } + + function test_acceptPendingAdmin() public { + address newAdmin = makeAddr("newAdmin"); + + chainContractAddress.setPendingAdmin(newAdmin); + + // Need this because in shared setup we start a prank as the governor + vm.stopPrank(); + vm.prank(newAdmin); + vm.expectEmit(true, true, true, false); + emit IChainTypeManager.NewPendingAdmin(newAdmin, address(0)); + vm.expectEmit(true, true, true, false); + emit IChainTypeManager.NewAdmin(address(0), newAdmin); + chainContractAddress.acceptAdmin(); + + address currentAdmin = chainContractAddress.admin(); + + assertEq(currentAdmin, newAdmin); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/CreateNewChain.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/CreateNewChain.t.sol new file mode 100644 index 000000000..c422dca99 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/CreateNewChain.t.sol @@ 
-0,0 +1,50 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {Unauthorized, HashMismatch} from "contracts/common/L1ContractErrors.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; + +contract createNewChainTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + + function test_RevertWhen_InitialDiamondCutHashMismatch() public { + Diamond.DiamondCutData memory initialDiamondCutData = getDiamondCutData(sharedBridge); + Diamond.DiamondCutData memory correctDiamondCutData = getDiamondCutData(address(diamondInit)); + + vm.expectRevert( + abi.encodeWithSelector( + HashMismatch.selector, + keccak256(abi.encode(correctDiamondCutData)), + keccak256(abi.encode(initialDiamondCutData)) + ) + ); + createNewChain(initialDiamondCutData); + } + + function test_RevertWhen_CalledNotByBridgehub() public { + Diamond.DiamondCutData memory initialDiamondCutData = getDiamondCutData(diamondInit); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, governor)); + chainContractAddress.createNewChain({ + _chainId: chainId, + _baseTokenAssetId: DataEncoding.encodeNTVAssetId(block.chainid, baseToken), + _admin: admin, + _initData: abi.encode(abi.encode(initialDiamondCutData), bytes("")), + _factoryDeps: new bytes[](0) + }); + } + + function test_SuccessfulCreationOfNewChain() public { + address newChainAddress = createNewChain(getDiamondCutData(diamondInit)); + + address admin = IZKChain(newChainAddress).getAdmin(); + + assertEq(newChainAdmin, admin); + assertNotEq(newChainAddress, address(0)); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/FreezeChain.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/FreezeChain.t.sol new file mode 100644 index 000000000..73a2dc498 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/FreezeChain.t.sol @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {FacetIsFrozen} from "contracts/common/L1ContractErrors.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +contract freezeChainTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + + function test_FreezingChain() public { + address newChainAddress = createNewChain(getDiamondCutData(diamondInit)); + vm.mockCall( + address(bridgehub), + abi.encodeWithSelector(IBridgehub.getZKChain.selector), + abi.encode(newChainAddress) + ); + GettersFacet gettersFacet = GettersFacet(newChainAddress); + bool isChainFrozen = gettersFacet.isDiamondStorageFrozen(); + assertEq(isChainFrozen, false); + vm.stopPrank(); + vm.startPrank(governor); + chainContractAddress.freezeChain(block.chainid); + // Repeated call should revert + vm.expectRevert(bytes("q1")); // storage frozen + chainContractAddress.freezeChain(block.chainid); + // Call fails as storage is frozen + vm.expectRevert(bytes("q1")); + isChainFrozen = gettersFacet.isDiamondStorageFrozen(); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/RevertBatches.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/RevertBatches.t.sol new file mode 100644 index 000000000..610c6c1a3 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/RevertBatches.t.sol @@ -0,0 +1,249 @@ +// 
SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Vm} from "forge-std/Test.sol"; + +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; + +import {Utils, L2_SYSTEM_CONTEXT_ADDRESS, L2_DA_VALIDATOR_ADDRESS} from "../../Utils/Utils.sol"; +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; + +import {COMMIT_TIMESTAMP_NOT_OLDER, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK, POINT_EVALUATION_PRECOMPILE_ADDR, REQUIRED_L2_GAS_PRICE_PER_PUBDATA, SYSTEM_UPGRADE_L2_TX_TYPE, PRIORITY_TX_MAX_GAS_LIMIT} from "contracts/common/Config.sol"; +import {L2_FORCE_DEPLOYER_ADDR, L2_COMPLEX_UPGRADER_ADDR, L2_GENESIS_UPGRADE_ADDR} from "contracts/common/L2ContractAddresses.sol"; //, COMPLEX_UPGRADER_ADDR, GENESIS_UPGRADE_ADDR +import {SemVer} from "contracts/common/libraries/SemVer.sol"; +import {L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {L2CanonicalTransaction} from "contracts/common/Messaging.sol"; +import {IExecutor, SystemLogKey, TOTAL_BLOBS_IN_COMMITMENT} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; +import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; +import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; +import {IL2GenesisUpgrade} from "contracts/state-transition/l2-deps/IL2GenesisUpgrade.sol"; +import {IComplexUpgrader} from "contracts/state-transition/l2-deps/IComplexUpgrader.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +contract revertBatchesTest is ChainTypeManagerTest { + // Items for logs & commits + uint256 internal currentTimestamp; + IExecutor.CommitBatchInfo internal newCommitBatchInfo; + IExecutor.StoredBatchInfo internal newStoredBatchInfo; + IExecutor.StoredBatchInfo internal 
genesisStoredBatchInfo; + uint256[] internal proofInput; + bytes32 l2DAValidatorOutputHash; + bytes operatorDAInput; + bytes defaultBlobCommitment; + bytes32[] defaultBlobVersionedHashes; + bytes16 defaultBlobOpeningPoint = 0x7142c5851421a2dc03dde0aabdb0ffdb; + bytes32 defaultBlobClaimedValue = 0x1e5eea3bbb85517461c1d1c7b84c7c2cec050662a5e81a71d5d7e2766eaff2f0; + bytes l2Logs; + address newChainAddress; + + bytes32 constant EMPTY_PREPUBLISHED_COMMITMENT = 0x0000000000000000000000000000000000000000000000000000000000000000; + bytes constant POINT_EVALUATION_PRECOMPILE_RESULT = + hex"000000000000000000000000000000000000000000000000000000000000100073eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001"; + + // Facets exposing the diamond + AdminFacet internal adminFacet; + ExecutorFacet internal executorFacet; + GettersFacet internal gettersFacet; + + function setUp() public { + deploy(); + + defaultBlobCommitment = Utils.getDefaultBlobCommitment(); + defaultBlobVersionedHashes = new bytes32[](1); + defaultBlobVersionedHashes[0] = 0x01c024b4740620a5849f95930cefe298933bdf588123ea897cdf0f2462f6d2d5; + + bytes memory precompileInput = Utils.defaultPointEvaluationPrecompileInput(defaultBlobVersionedHashes[0]); + vm.mockCall(POINT_EVALUATION_PRECOMPILE_ADDR, precompileInput, POINT_EVALUATION_PRECOMPILE_RESULT); + + l2Logs = Utils.encodePacked(Utils.createSystemLogs(bytes32(0))); + genesisStoredBatchInfo = IExecutor.StoredBatchInfo({ + batchNumber: 0, + batchHash: bytes32(uint256(0x01)), + indexRepeatedStorageChanges: 0x01, + numberOfLayer1Txs: 0, + priorityOperationsHash: keccak256(""), + l2LogsTreeRoot: DEFAULT_L2_LOGS_TREE_ROOT_HASH, + timestamp: 0, + commitment: bytes32(uint256(0x01)) + }); + vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1 + 1); + currentTimestamp = block.timestamp; + newCommitBatchInfo = IExecutor.CommitBatchInfo({ + batchNumber: 1, + timestamp: uint64(currentTimestamp), + indexRepeatedStorageChanges: 0, + newStateRoot: 
Utils.randomBytes32("newStateRoot"), + numberOfLayer1Txs: 0, + priorityOperationsHash: keccak256(""), + bootloaderHeapInitialContentsHash: Utils.randomBytes32("bootloaderHeapInitialContentsHash"), + eventsQueueStateHash: Utils.randomBytes32("eventsQueueStateHash"), + systemLogs: l2Logs, + operatorDAInput: "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + }); + + { + bytes memory complexUpgraderCalldata; + address l1CtmDeployer = address(bridgehub.l1CtmDeployer()); + { + bytes memory l2GenesisUpgradeCalldata = abi.encodeCall( + IL2GenesisUpgrade.genesisUpgrade, + (chainId, l1CtmDeployer, forceDeploymentsData, "0x") + ); + complexUpgraderCalldata = abi.encodeCall( + IComplexUpgrader.upgrade, + (L2_GENESIS_UPGRADE_ADDR, l2GenesisUpgradeCalldata) + ); + } + + // slither-disable-next-line unused-return + (, uint32 minorVersion, ) = SemVer.unpackSemVer(SafeCast.toUint96(0)); + } + + newChainAddress = createNewChain(getDiamondCutData(diamondInit)); + vm.mockCall( + address(bridgehub), + abi.encodeWithSelector(IBridgehub.getZKChain.selector), + abi.encode(newChainAddress) + ); + + executorFacet = ExecutorFacet(address(newChainAddress)); + gettersFacet = GettersFacet(address(newChainAddress)); + adminFacet = AdminFacet(address(newChainAddress)); + + vm.stopPrank(); + vm.prank(newChainAdmin); + adminFacet.setDAValidatorPair(address(rollupL1DAValidator), L2_DA_VALIDATOR_ADDRESS); + } + + function test_SuccessfulBatchReverting() public { + vm.startPrank(governor); + + bytes32 uncompressedStateDiffHash = Utils.randomBytes32("uncompressedStateDiffHash"); + bytes32 totalL2PubdataHash = Utils.randomBytes32("totalL2PubdataHash"); + uint8 numberOfBlobs = 1; + bytes32[] memory blobsLinearHashes = new bytes32[](1); + blobsLinearHashes[0] = 
Utils.randomBytes32("blobsLinearHashes"); + + operatorDAInput = abi.encodePacked( + uncompressedStateDiffHash, + totalL2PubdataHash, + numberOfBlobs, + blobsLinearHashes, + bytes1(0x01), + defaultBlobCommitment, + EMPTY_PREPUBLISHED_COMMITMENT + ); + + l2DAValidatorOutputHash = Utils.constructRollupL2DAValidatorOutputHash( + uncompressedStateDiffHash, + totalL2PubdataHash, + uint8(numberOfBlobs), + blobsLinearHashes + ); + + vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1); + currentTimestamp = block.timestamp; + bytes32 expectedSystemContractUpgradeTxHash = gettersFacet.getL2SystemContractsUpgradeTxHash(); + bytes[] memory correctL2Logs = Utils.createSystemLogsWithUpgradeTransactionForCTM( + expectedSystemContractUpgradeTxHash, + l2DAValidatorOutputHash + ); + correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), + Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + + IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; + correctNewCommitBatchInfo.timestamp = uint64(currentTimestamp); + correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); + correctNewCommitBatchInfo.operatorDAInput = operatorDAInput; + + bytes32[] memory blobHashes = new bytes32[](TOTAL_BLOBS_IN_COMMITMENT); + blobHashes[0] = blobsLinearHashes[0]; + + bytes32[] memory blobCommitments = new bytes32[](TOTAL_BLOBS_IN_COMMITMENT); + blobCommitments[0] = keccak256( + abi.encodePacked( + defaultBlobVersionedHashes[0], + abi.encodePacked(defaultBlobOpeningPoint, defaultBlobClaimedValue) + ) + ); + + bytes32 expectedBatchCommitment = Utils.createBatchCommitment( + correctNewCommitBatchInfo, + uncompressedStateDiffHash, + blobCommitments, + blobHashes + ); + + IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); + 
correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; + correctCommitBatchInfoArray[0].operatorDAInput = operatorDAInput; + + vm.stopPrank(); + vm.startPrank(validator); + vm.blobhashes(defaultBlobVersionedHashes); + vm.recordLogs(); + (uint256 commitBatchFrom, uint256 commitBatchTo, bytes memory commitData) = Utils.encodeCommitBatchesData( + genesisStoredBatchInfo, + correctCommitBatchInfoArray + ); + executorFacet.commitBatchesSharedBridge(uint256(0), commitBatchFrom, commitBatchTo, commitData); + + Vm.Log[] memory entries = vm.getRecordedLogs(); + + assertEq(entries.length, 1); + assertEq(entries[0].topics[0], keccak256("BlockCommit(uint256,bytes32,bytes32)")); + assertEq(entries[0].topics[1], bytes32(uint256(1))); // batchNumber + assertEq(entries[0].topics[2], correctNewCommitBatchInfo.newStateRoot); // batchHash + + uint256 totalBatchesCommitted = gettersFacet.getTotalBatchesCommitted(); + assertEq(totalBatchesCommitted, 1); + + newStoredBatchInfo = IExecutor.StoredBatchInfo({ + batchNumber: 1, + batchHash: entries[0].topics[2], + indexRepeatedStorageChanges: 0, + numberOfLayer1Txs: 0, + priorityOperationsHash: keccak256(""), + l2LogsTreeRoot: DEFAULT_L2_LOGS_TREE_ROOT_HASH, + timestamp: currentTimestamp, + commitment: entries[0].topics[3] + }); + + IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); + storedBatchInfoArray[0] = newStoredBatchInfo; + + (uint256 proveBatchFrom, uint256 proveBatchTo, bytes memory proveData) = Utils.encodeProveBatchesData( + genesisStoredBatchInfo, + storedBatchInfoArray, + proofInput + ); + + executorFacet.proveBatchesSharedBridge(uint256(0), proveBatchFrom, proveBatchTo, proveData); + + // Test batch revert triggered from CTM + vm.stopPrank(); + vm.prank(address(chainContractAddress)); + adminFacet.setValidator(address(chainContractAddress), true); + vm.startPrank(governor); + + uint256 totalBlocksCommittedBefore = gettersFacet.getTotalBlocksCommitted(); + 
assertEq(totalBlocksCommittedBefore, 1, "totalBlocksCommittedBefore"); + + uint256 totalBlocksVerifiedBefore = gettersFacet.getTotalBlocksVerified(); + assertEq(totalBlocksVerifiedBefore, 1, "totalBlocksVerifiedBefore"); + + chainContractAddress.revertBatches(chainId, 0); + + uint256 totalBlocksCommitted = gettersFacet.getTotalBlocksCommitted(); + assertEq(totalBlocksCommitted, 0, "totalBlocksCommitted"); + + uint256 totalBlocksVerified = gettersFacet.getTotalBlocksVerified(); + assertEq(totalBlocksVerified, 0, "totalBlocksVerified"); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetChainCreationParams.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetChainCreationParams.t.sol similarity index 83% rename from l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetChainCreationParams.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetChainCreationParams.t.sol index 85fa1a316..e55334737 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetChainCreationParams.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetChainCreationParams.t.sol @@ -1,13 +1,17 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -import {ChainCreationParams} from "contracts/state-transition/IStateTransitionManager.sol"; +import {ChainCreationParams} from "contracts/state-transition/IChainTypeManager.sol"; import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; import {EMPTY_STRING_KECCAK, DEFAULT_L2_LOGS_TREE_ROOT_HASH} from "contracts/common/Config.sol"; -contract SetChainCreationParamsTest 
is StateTransitionManagerTest { +contract SetChainCreationParamsTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + function test_SettingInitialCutHash() public { bytes32 initialCutHash = keccak256(abi.encode(getDiamondCutData(address(diamondInit)))); address randomDiamondInit = address(0x303030303030303030303); @@ -27,13 +31,14 @@ contract SetChainCreationParamsTest is StateTransitionManagerTest { genesisBatchHash: genesisBatchHash, genesisIndexRepeatedStorageChanges: genesisIndexRepeatedStorageChanges, genesisBatchCommitment: genesisBatchCommitment, - diamondCut: newDiamondCutData + diamondCut: newDiamondCutData, + forceDeploymentsData: bytes("") }); chainContractAddress.setChainCreationParams(newChainCreationParams); assertEq(chainContractAddress.initialCutHash(), newCutHash, "Initial cut hash update was not successful"); - assertEq(chainContractAddress.genesisUpgrade(), newGenesisUpgrade, "Genesis upgrade was not set correctly"); + assertEq(chainContractAddress.l1GenesisUpgrade(), newGenesisUpgrade, "Genesis upgrade was not set correctly"); // We need to initialize the state hash because it is used in the commitment of the next batch IExecutor.StoredBatchInfo memory newBatchZero = IExecutor.StoredBatchInfo({ diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetNewVersionUpgrade.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetNewVersionUpgrade.t.sol similarity index 69% rename from l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetNewVersionUpgrade.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetNewVersionUpgrade.t.sol index ced7e3f7d..1dbaa2462 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetNewVersionUpgrade.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetNewVersionUpgrade.t.sol @@ 
-1,10 +1,14 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -contract setNewVersionUpgradeTest is StateTransitionManagerTest { +contract setNewVersionUpgradeTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + function test_SettingNewVersionUpgrade() public { assertEq(chainContractAddress.protocolVersion(), 0, "Initial protocol version is not correct"); @@ -16,5 +20,11 @@ contract setNewVersionUpgradeTest is StateTransitionManagerTest { assertEq(chainContractAddress.upgradeCutHash(0), newCutHash, "Diamond cut upgrade was not successful"); assertEq(chainContractAddress.protocolVersion(), 1, "New protocol version is not correct"); + + (uint32 major, uint32 minor, uint32 patch) = chainContractAddress.getSemverProtocolVersion(); + + assertEq(major, 0); + assertEq(minor, 0); + assertEq(patch, 1); } } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetUpgradeDiamondCut.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetUpgradeDiamondCut.t.sol similarity index 81% rename from l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetUpgradeDiamondCut.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetUpgradeDiamondCut.t.sol index a71f35d2e..d5ca40d50 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetUpgradeDiamondCut.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetUpgradeDiamondCut.t.sol @@ -1,10 +1,14 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; +import {ChainTypeManagerTest} 
from "./_ChainTypeManager_Shared.t.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -contract setUpgradeDiamondCutTest is StateTransitionManagerTest { +contract setUpgradeDiamondCutTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + function test_SettingUpgradeDiamondCut() public { assertEq(chainContractAddress.protocolVersion(), 0, "Initial protocol version is not correct"); diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetValidatorTimelock.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetValidatorTimelock.t.sol new file mode 100644 index 000000000..cbe0f1c6d --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/SetValidatorTimelock.t.sol @@ -0,0 +1,48 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; + +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; + +contract setValidatorTimelockTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + + function test_SettingValidatorTimelock() public { + assertEq( + chainContractAddress.validatorTimelock(), + validator, + "Initial validator timelock address is not correct" + ); + + address newValidatorTimelock = address(0x0000000000000000000000000000000000004235); + chainContractAddress.setValidatorTimelock(newValidatorTimelock); + + assertEq( + chainContractAddress.validatorTimelock(), + newValidatorTimelock, + "Validator timelock update was not successful" + ); + } + + function test_RevertWhen_NotOwner() public { + // Need this because in shared setup we start a prank as the governor + vm.stopPrank(); + + address notOwner = makeAddr("notOwner"); + assertEq( + chainContractAddress.validatorTimelock(), + validator, + "Initial validator timelock address is not correct" + ); + + vm.prank(notOwner); + 
vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, notOwner)); + address newValidatorTimelock = address(0x0000000000000000000000000000000000004235); + chainContractAddress.setValidatorTimelock(newValidatorTimelock); + + assertEq(chainContractAddress.validatorTimelock(), validator, "Validator should not have been updated"); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/StateTransitionOwnerZero.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/StateTransitionOwnerZero.t.sol new file mode 100644 index 000000000..88c786c7b --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/StateTransitionOwnerZero.t.sol @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {ChainTypeManagerTest} from "./_ChainTypeManager_Shared.t.sol"; +import {ChainTypeManager} from "contracts/state-transition/ChainTypeManager.sol"; +import {ChainTypeManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IChainTypeManager.sol"; +import {ZeroAddress} from "contracts/common/L1ContractErrors.sol"; + +contract initializingCTMOwnerZeroTest is ChainTypeManagerTest { + function setUp() public { + deploy(); + } + + function test_InitializingCTMWithGovernorZeroShouldRevert() public { + ChainCreationParams memory chainCreationParams = ChainCreationParams({ + genesisUpgrade: address(genesisUpgradeContract), + genesisBatchHash: bytes32(uint256(0x01)), + genesisIndexRepeatedStorageChanges: 1, + genesisBatchCommitment: bytes32(uint256(0x01)), + diamondCut: getDiamondCutData(address(diamondInit)), + forceDeploymentsData: bytes("") + }); + + ChainTypeManagerInitializeData memory ctmInitializeDataNoOwner = ChainTypeManagerInitializeData({ + owner: address(0), + validatorTimelock: validator, + 
chainCreationParams: chainCreationParams, + protocolVersion: 0 + }); + + vm.expectRevert(ZeroAddress.selector); + new TransparentUpgradeableProxy( + address(chainTypeManager), + admin, + abi.encodeCall(ChainTypeManager.initialize, ctmInitializeDataNoOwner) + ); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/_ChainTypeManager_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/_ChainTypeManager_Shared.t.sol new file mode 100644 index 000000000..5ecaa7407 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/ChainTypeManager/_ChainTypeManager_Shared.t.sol @@ -0,0 +1,190 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.21; + +import {Test} from "forge-std/Test.sol"; +import {console2 as console} from "forge-std/Script.sol"; + +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; + +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; +import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; +import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; +import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; +import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; +import {L1GenesisUpgrade} from "contracts/upgrades/L1GenesisUpgrade.sol"; +import {InitializeDataNewChain} from 
"contracts/state-transition/chain-interfaces/IDiamondInit.sol"; +import {ChainTypeManager} from "contracts/state-transition/ChainTypeManager.sol"; +import {ChainTypeManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IChainTypeManager.sol"; +import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; +import {DummyBridgehub} from "contracts/dev-contracts/test/DummyBridgehub.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {ZeroAddress} from "contracts/common/L1ContractErrors.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {RollupL1DAValidator} from "da-contracts/RollupL1DAValidator.sol"; + +contract ChainTypeManagerTest is Test { + ChainTypeManager internal chainTypeManager; + ChainTypeManager internal chainContractAddress; + L1GenesisUpgrade internal genesisUpgradeContract; + Bridgehub internal bridgehub; + RollupL1DAValidator internal rollupL1DAValidator; + address internal diamondInit; + address internal constant governor = address(0x1010101); + address internal constant admin = address(0x2020202); + address internal constant baseToken = address(0x3030303); + address internal constant sharedBridge = address(0x4040404); + address internal constant validator = address(0x5050505); + address internal constant l1Nullifier = address(0x6060606); + address internal newChainAdmin; + uint256 chainId = 112; + address internal testnetVerifier = address(new TestnetVerifier()); + bytes internal forceDeploymentsData = hex""; + uint256 eraChainId = 9; + + Diamond.FacetCut[] internal facetCuts; + + function deploy() public { + bridgehub = new Bridgehub(block.chainid, governor, type(uint256).max); + vm.prank(governor); + bridgehub.setAddresses(sharedBridge, ICTMDeploymentTracker(address(0)), 
IMessageRoot(address(0))); + + vm.mockCall( + address(sharedBridge), + abi.encodeCall(L1AssetRouter.l2BridgeAddress, (chainId)), + abi.encode(makeAddr("l2BridgeAddress")) + ); + + newChainAdmin = makeAddr("chainadmin"); + + vm.startPrank(address(bridgehub)); + chainTypeManager = new ChainTypeManager(address(IBridgehub(address(bridgehub)))); + diamondInit = address(new DiamondInit()); + genesisUpgradeContract = new L1GenesisUpgrade(); + + facetCuts.push( + Diamond.FacetCut({ + facet: address(new UtilsFacet()), + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getUtilsFacetSelectors() + }) + ); + facetCuts.push( + Diamond.FacetCut({ + facet: address(new AdminFacet(block.chainid)), + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getAdminSelectors() + }) + ); + facetCuts.push( + Diamond.FacetCut({ + facet: address(new ExecutorFacet()), + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getExecutorSelectors() + }) + ); + facetCuts.push( + Diamond.FacetCut({ + facet: address(new GettersFacet()), + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getGettersSelectors() + }) + ); + + ChainCreationParams memory chainCreationParams = ChainCreationParams({ + genesisUpgrade: address(genesisUpgradeContract), + genesisBatchHash: bytes32(uint256(0x01)), + genesisIndexRepeatedStorageChanges: 0x01, + genesisBatchCommitment: bytes32(uint256(0x01)), + diamondCut: getDiamondCutData(address(diamondInit)), + forceDeploymentsData: forceDeploymentsData + }); + + ChainTypeManagerInitializeData memory ctmInitializeDataNoGovernor = ChainTypeManagerInitializeData({ + owner: address(0), + validatorTimelock: validator, + chainCreationParams: chainCreationParams, + protocolVersion: 0 + }); + + vm.expectRevert(ZeroAddress.selector); + new TransparentUpgradeableProxy( + address(chainTypeManager), + admin, + abi.encodeCall(ChainTypeManager.initialize, ctmInitializeDataNoGovernor) + ); + + ChainTypeManagerInitializeData 
memory ctmInitializeData = ChainTypeManagerInitializeData({ + owner: governor, + validatorTimelock: validator, + chainCreationParams: chainCreationParams, + protocolVersion: 0 + }); + + TransparentUpgradeableProxy transparentUpgradeableProxy = new TransparentUpgradeableProxy( + address(chainTypeManager), + admin, + abi.encodeCall(ChainTypeManager.initialize, ctmInitializeData) + ); + chainContractAddress = ChainTypeManager(address(transparentUpgradeableProxy)); + + rollupL1DAValidator = new RollupL1DAValidator(); + + vm.stopPrank(); + vm.startPrank(governor); + } + + function getDiamondCutData(address _diamondInit) internal view returns (Diamond.DiamondCutData memory) { + InitializeDataNewChain memory initializeData = Utils.makeInitializeDataForNewChain(testnetVerifier); + + bytes memory initCalldata = abi.encode(initializeData); + + return Diamond.DiamondCutData({facetCuts: facetCuts, initAddress: _diamondInit, initCalldata: initCalldata}); + } + + function getCTMInitData() internal view returns (bytes memory) { + return abi.encode(abi.encode(getDiamondCutData(diamondInit)), forceDeploymentsData); + } + + function createNewChain(Diamond.DiamondCutData memory _diamondCut) internal returns (address) { + vm.stopPrank(); + vm.startPrank(address(bridgehub)); + + vm.mockCall( + address(sharedBridge), + abi.encodeWithSelector(IL1AssetRouter.L1_NULLIFIER.selector), + abi.encode(l1Nullifier) + ); + + vm.mockCall( + address(l1Nullifier), + abi.encodeWithSelector(IL1Nullifier.l2BridgeAddress.selector), + abi.encode(l1Nullifier) + ); + + return + chainContractAddress.createNewChain({ + _chainId: chainId, + _baseTokenAssetId: DataEncoding.encodeNTVAssetId(block.chainid, baseToken), + _admin: newChainAdmin, + _initData: abi.encode(abi.encode(_diamondCut), bytes("")), + _factoryDeps: new bytes[](0) + }); + } + + // add this to be excluded from coverage report + function test() internal virtual {} +} diff --git 
a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondInit/Initialize.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondInit/Initialize.t.sol similarity index 66% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondInit/Initialize.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondInit/Initialize.t.sol index 3c2b01dd5..ae947e405 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondInit/Initialize.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondInit/Initialize.t.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.24; import {DiamondInitTest} from "./_DiamondInit_Shared.t.sol"; -import {Utils} from "foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; @@ -11,11 +11,12 @@ import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.s import {InitializeData} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; import {IVerifier} from "contracts/state-transition/chain-interfaces/IVerifier.sol"; import {MAX_GAS_PER_TRANSACTION} from "contracts/common/Config.sol"; +import {MalformedCalldata, ZeroAddress, TooMuchGas} from "contracts/common/L1ContractErrors.sol"; contract InitializeTest is DiamondInitTest { - function test_revertWhen_verifierIsZeroAddress() public { + function test_revertWhen_dualVerifierIsZeroAddress() public { InitializeData memory initializeData = Utils.makeInitializeData(testnetVerifier); - initializeData.verifier = IVerifier(address(0)); + 
initializeData.dualVerifier = IVerifier(address(0)); Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ facetCuts: facetCuts, @@ -23,10 +24,39 @@ contract InitializeTest is DiamondInitTest { initCalldata: abi.encodeWithSelector(DiamondInit.initialize.selector, initializeData) }); - vm.expectRevert(bytes.concat("vt")); + vm.expectRevert(ZeroAddress.selector); new DiamondProxy(block.chainid, diamondCutData); } + function test_revertWhen_plonkVerifierIsZeroAddress() public { + InitializeData memory initializeData = Utils.makeInitializeData(testnetVerifier); + initializeData.plonkVerifier = address(0); + + Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ + facetCuts: facetCuts, + initAddress: address(new DiamondInit()), + initCalldata: abi.encodeWithSelector(DiamondInit.initialize.selector, initializeData) + }); + + vm.expectRevert(ZeroAddress.selector); + new DiamondProxy(block.chainid, diamondCutData); + } + + function test_revertWhen_fflonkVerifierIsZeroAddress() public { + InitializeData memory initializeData = Utils.makeInitializeData(testnetVerifier); + initializeData.fflonkVerifier = address(0); + + Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ + facetCuts: facetCuts, + initAddress: address(new DiamondInit()), + initCalldata: abi.encodeWithSelector(DiamondInit.initialize.selector, initializeData) + }); + + vm.expectRevert(ZeroAddress.selector); + new DiamondProxy(block.chainid, diamondCutData); + } + + function test_revertWhen_governorIsZeroAddress() public { InitializeData memory initializeData = Utils.makeInitializeData(testnetVerifier); initializeData.admin = address(0); @@ -37,7 +67,7 @@ contract InitializeTest is DiamondInitTest { initCalldata: abi.encodeWithSelector(DiamondInit.initialize.selector, initializeData) }); - vm.expectRevert(bytes.concat("vy")); + vm.expectRevert(ZeroAddress.selector); new DiamondProxy(block.chainid, diamondCutData); } @@ -51,7 +81,7 @@ contract InitializeTest 
is DiamondInitTest { initCalldata: abi.encodeWithSelector(DiamondInit.initialize.selector, initializeData) }); - vm.expectRevert(bytes.concat("hc")); + vm.expectRevert(ZeroAddress.selector); new DiamondProxy(block.chainid, diamondCutData); } @@ -65,7 +95,7 @@ contract InitializeTest is DiamondInitTest { initCalldata: abi.encodeWithSelector(DiamondInit.initialize.selector, initializeData) }); - vm.expectRevert(bytes.concat("vu")); + vm.expectRevert(TooMuchGas.selector); new DiamondProxy(block.chainid, diamondCutData); } @@ -83,12 +113,15 @@ contract InitializeTest is DiamondInitTest { assertEq(utilsFacet.util_getChainId(), initializeData.chainId); assertEq(utilsFacet.util_getBridgehub(), initializeData.bridgehub); - assertEq(utilsFacet.util_getStateTransitionManager(), initializeData.stateTransitionManager); - assertEq(utilsFacet.util_getBaseToken(), initializeData.baseToken); - assertEq(utilsFacet.util_getBaseTokenBridge(), initializeData.baseTokenBridge); + assertEq(utilsFacet.util_getChainTypeManager(), initializeData.chainTypeManager); + assertEq(utilsFacet.util_getBaseTokenAssetId(), initializeData.baseTokenAssetId); assertEq(utilsFacet.util_getProtocolVersion(), initializeData.protocolVersion); - assertEq(address(utilsFacet.util_getVerifier()), address(initializeData.verifier)); + assertEq(address(utilsFacet.util_getDualVerifier()), address(initializeData.dualVerifier)); + assertEq(utilsFacet.util_getPlonkVerifier(), initializeData.plonkVerifier); + assertEq(utilsFacet.util_getFflonkVerifier(), initializeData.fflonkVerifier); + assertEq(utilsFacet.util_getFflonkProofLength(), initializeData.fflonkProofLength); + assertEq(utilsFacet.util_getAdmin(), initializeData.admin); assertEq(utilsFacet.util_getValidator(initializeData.validatorTimelock), true); diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondInit/_DiamondInit_Shared.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondInit/_DiamondInit_Shared.t.sol similarity index 84% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondInit/_DiamondInit_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondInit/_DiamondInit_Shared.t.sol index 8a50fd5d5..79d0145dd 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondInit/_DiamondInit_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondInit/_DiamondInit_Shared.t.sol @@ -3,8 +3,8 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils} from "foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondProxy/DiamondProxy.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondProxy/DiamondProxy.t.sol similarity index 92% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondProxy/DiamondProxy.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondProxy/DiamondProxy.t.sol index ba1ece9db..d58d81ae5 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/DiamondProxy/DiamondProxy.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/DiamondProxy/DiamondProxy.t.sol @@ -3,17 +3,18 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils} from 
"foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; import {InitializeData} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {DiamondProxy} from "contracts/state-transition/chain-deps/DiamondProxy.sol"; -import {ZkSyncHyperchainBase} from "contracts/state-transition/chain-deps/facets/ZkSyncHyperchainBase.sol"; +import {ZKChainBase} from "contracts/state-transition/chain-deps/facets/ZKChainBase.sol"; import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; +import {FacetIsFrozen, ValueMismatch, InvalidSelector} from "contracts/common/L1ContractErrors.sol"; -contract TestFacet is ZkSyncHyperchainBase { +contract TestFacet is ZKChainBase { function func() public pure returns (bool) { return true; } @@ -59,7 +60,7 @@ contract DiamondProxyTest is Test { initCalldata: abi.encodeWithSelector(DiamondInit.initialize.selector, initializeData) }); - vm.expectRevert(abi.encodePacked("pr")); + vm.expectRevert(bytes("pr")); new DiamondProxy(block.chainid + 1, diamondCutData); } @@ -107,7 +108,7 @@ contract DiamondProxyTest is Test { DiamondProxy diamondProxy = new DiamondProxy(block.chainid, diamondCutData); TestFacet testFacet = TestFacet(address(diamondProxy)); - vm.expectRevert(abi.encodePacked("F")); + vm.expectRevert(bytes("F")); testFacet.func(); } @@ -126,7 +127,7 @@ contract DiamondProxyTest is Test { utilsFacet.util_setIsFrozen(true); - vm.expectRevert(abi.encodePacked("q1")); + vm.expectRevert(bytes("q1")); testFacet.func(); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/AcceptAdmin.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/AcceptAdmin.t.sol similarity index 88% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/AcceptAdmin.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/AcceptAdmin.t.sol index fe8c99db6..ab85ecdb0 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/AcceptAdmin.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/AcceptAdmin.t.sol @@ -3,6 +3,7 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; contract AcceptAdminTest is AdminTest { event NewPendingAdmin(address indexed oldPendingAdmin, address indexed newPendingAdmin); @@ -11,9 +12,8 @@ contract AcceptAdminTest is AdminTest { function test_revertWhen_calledByNonPendingAdmin() public { address nonPendingAdmin = makeAddr("nonPendingAdmin"); - vm.expectRevert(bytes.concat("n4")); - vm.startPrank(nonPendingAdmin); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonPendingAdmin)); adminFacet.acceptAdmin(); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/ChangeFeeParams.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/ChangeFeeParams.t.sol similarity index 78% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/ChangeFeeParams.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/ChangeFeeParams.t.sol index 1575af870..5a1f4cbb0 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/ChangeFeeParams.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/ChangeFeeParams.t.sol @@ -3,9 +3,9 @@ pragma 
solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; -import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZkSyncHyperchainStorage.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {Unauthorized, PriorityTxPubdataExceedsMaxPubDataPerBatch} from "contracts/common/L1ContractErrors.sol"; contract ChangeFeeParamsTest is AdminTest { event NewFeeParams(FeeParams oldFeeParams, FeeParams newFeeParams); @@ -25,8 +25,8 @@ contract ChangeFeeParamsTest is AdminTest { ); } - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); + function test_revertWhen_calledByNonChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); FeeParams memory newFeeParams = FeeParams({ pubdataPricingMode: PubdataPricingMode.Rollup, batchOverheadL1Gas: 1_000_000, @@ -36,14 +36,14 @@ contract ChangeFeeParamsTest is AdminTest { minimalL2GasPrice: 250_000_000 }); - vm.startPrank(nonStateTransitionManager); - vm.expectRevert(ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER); + vm.startPrank(nonChainTypeManager); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); adminFacet.changeFeeParams(newFeeParams); } function test_revertWhen_newMaxPubdataPerBatchIsLessThanMaxPubdataPerTransaction() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = utilsFacet.util_getChainTypeManager(); uint32 priorityTxMaxPubdata = 88_000; uint32 maxPubdataPerBatch = priorityTxMaxPubdata - 1; FeeParams memory newFeeParams = FeeParams({ @@ -55,14 +55,14 @@ contract ChangeFeeParamsTest is AdminTest { minimalL2GasPrice: 250_000_000 }); - vm.expectRevert(bytes.concat("n6")); + 
vm.expectRevert(PriorityTxPubdataExceedsMaxPubDataPerBatch.selector); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); adminFacet.changeFeeParams(newFeeParams); } function test_successfulChange() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = utilsFacet.util_getChainTypeManager(); FeeParams memory oldFeeParams = utilsFacet.util_getFeeParams(); FeeParams memory newFeeParams = FeeParams({ pubdataPricingMode: PubdataPricingMode.Rollup, @@ -77,7 +77,7 @@ contract ChangeFeeParamsTest is AdminTest { vm.expectEmit(true, true, true, true, address(adminFacet)); emit NewFeeParams(oldFeeParams, newFeeParams); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); adminFacet.changeFeeParams(newFeeParams); bytes32 newFeeParamsHash = keccak256(abi.encode(newFeeParams)); diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/ExecuteUpgrade.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/ExecuteUpgrade.t.sol similarity index 84% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/ExecuteUpgrade.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/ExecuteUpgrade.t.sol index 95c6f54af..de559f27b 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/ExecuteUpgrade.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/ExecuteUpgrade.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; import {Utils} from "../../../../Utils/Utils.sol"; import {VerifierParams} from "contracts/state-transition/chain-interfaces/IVerifier.sol"; @@ 
-15,17 +15,17 @@ import {ProposedUpgrade} from "contracts/upgrades/BaseZkSyncUpgrade.sol"; contract ExecuteUpgradeTest is AdminTest { event ExecuteUpgrade(Diamond.DiamondCutData diamondCut); - function test_revertWhen_calledByNonGovernorOrStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); + function test_revertWhen_calledByNonGovernorOrChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ facetCuts: new Diamond.FacetCut[](0), initAddress: address(0), initCalldata: new bytes(0) }); - vm.expectRevert(ERROR_ONLY_STATE_TRANSITION_MANAGER); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); - vm.startPrank(nonStateTransitionManager); + vm.startPrank(nonChainTypeManager); adminFacet.executeUpgrade(diamondCutData); } @@ -61,8 +61,8 @@ contract ExecuteUpgradeTest is AdminTest { initCalldata: abi.encodeCall(upgrade.upgrade, (proposedUpgrade)) }); - address stm = utilsFacet.util_getStateTransitionManager(); - vm.startPrank(stm); + address ctm = utilsFacet.util_getChainTypeManager(); + vm.startPrank(ctm); adminFacet.executeUpgrade(diamondCutData); } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/FreezeDiamond.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/FreezeDiamond.t.sol new file mode 100644 index 000000000..457611105 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/FreezeDiamond.t.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {AdminTest} from "./_Admin_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; + +contract FreezeDiamondTest is AdminTest { + event Freeze(); + + function test_revertWhen_calledByNonChainTypeManager() public { + address 
nonChainTypeManager = makeAddr("nonChainTypeManager"); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); + + vm.startPrank(nonChainTypeManager); + adminFacet.freezeDiamond(); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPendingGovernor.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPendingGovernor.t.sol similarity index 87% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPendingGovernor.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPendingGovernor.t.sol index 8dbc12bbd..359e9ce8c 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPendingGovernor.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPendingGovernor.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_ADMIN} from "../Base/_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; contract SetPendingAdminTest is AdminTest { event NewPendingAdmin(address indexed oldPendingAdmin, address indexed newPendingAdmin); @@ -12,8 +12,7 @@ contract SetPendingAdminTest is AdminTest { address nonAdmin = makeAddr("nonAdmin"); address newPendingAdmin = makeAddr("newPendingAdmin"); - vm.expectRevert(ERROR_ONLY_ADMIN); - + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonAdmin)); vm.startPrank(nonAdmin); adminFacet.setPendingAdmin(newPendingAdmin); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPorterAvailability.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPorterAvailability.t.sol similarity index 68% rename from 
l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPorterAvailability.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPorterAvailability.t.sol index 94e209a15..ca594b93a 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPorterAvailability.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPorterAvailability.t.sol @@ -3,23 +3,22 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; contract SetPorterAvailabilityTest is AdminTest { event IsPorterAvailableStatusUpdate(bool isPorterAvailable); - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); + function test_revertWhen_calledByNonChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); bool isPorterAvailable = true; - vm.expectRevert(ERROR_ONLY_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonStateTransitionManager); + vm.startPrank(nonChainTypeManager); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); adminFacet.setPorterAvailability(isPorterAvailable); } function test_setPorterAvailabilityToFalse() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = utilsFacet.util_getChainTypeManager(); bool isPorterAvailable = false; utilsFacet.util_setZkPorterAvailability(true); @@ -28,14 +27,14 @@ contract SetPorterAvailabilityTest is AdminTest { vm.expectEmit(true, true, true, true, address(adminFacet)); emit IsPorterAvailableStatusUpdate(isPorterAvailable); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); 
adminFacet.setPorterAvailability(isPorterAvailable); assertEq(utilsFacet.util_getZkPorterAvailability(), isPorterAvailable); } function test_setPorterAvailabilityToTrue() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = utilsFacet.util_getChainTypeManager(); bool isPorterAvailable = true; utilsFacet.util_setZkPorterAvailability(false); @@ -44,7 +43,7 @@ contract SetPorterAvailabilityTest is AdminTest { vm.expectEmit(true, true, true, true, address(adminFacet)); emit IsPorterAvailableStatusUpdate(isPorterAvailable); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); adminFacet.setPorterAvailability(isPorterAvailable); assertEq(utilsFacet.util_getZkPorterAvailability(), isPorterAvailable); diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPriorityTxMaxGasLimit.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPriorityTxMaxGasLimit.t.sol similarity index 65% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPriorityTxMaxGasLimit.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPriorityTxMaxGasLimit.t.sol index 8ce9e4092..e5841bc87 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetPriorityTxMaxGasLimit.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetPriorityTxMaxGasLimit.t.sol @@ -3,35 +3,33 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; import {MAX_GAS_PER_TRANSACTION} from "contracts/common/Config.sol"; +import {Unauthorized, TooMuchGas} from "contracts/common/L1ContractErrors.sol"; contract SetPriorityTxMaxGasLimitTest is AdminTest { event NewPriorityTxMaxGasLimit(uint256 
oldPriorityTxMaxGasLimit, uint256 newPriorityTxMaxGasLimit); - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); + function test_revertWhen_calledByNonChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); uint256 newPriorityTxMaxGasLimit = 100; - vm.startPrank(nonStateTransitionManager); - vm.expectRevert(ERROR_ONLY_STATE_TRANSITION_MANAGER); - + vm.startPrank(nonChainTypeManager); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); adminFacet.setPriorityTxMaxGasLimit(newPriorityTxMaxGasLimit); } function test_revertWhen_newPriorityTxMaxGasLimitIsGreaterThanMaxGasPerTransaction() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = utilsFacet.util_getChainTypeManager(); uint256 newPriorityTxMaxGasLimit = MAX_GAS_PER_TRANSACTION + 1; - vm.expectRevert(bytes.concat("n5")); - - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); + vm.expectRevert(TooMuchGas.selector); adminFacet.setPriorityTxMaxGasLimit(newPriorityTxMaxGasLimit); } function test_successfulSet() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = utilsFacet.util_getChainTypeManager(); uint256 oldPriorityTxMaxGasLimit = utilsFacet.util_getPriorityTxMaxGasLimit(); uint256 newPriorityTxMaxGasLimit = 100; @@ -39,7 +37,7 @@ contract SetPriorityTxMaxGasLimitTest is AdminTest { vm.expectEmit(true, true, true, true, address(adminFacet)); emit NewPriorityTxMaxGasLimit(oldPriorityTxMaxGasLimit, newPriorityTxMaxGasLimit); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); adminFacet.setPriorityTxMaxGasLimit(newPriorityTxMaxGasLimit); assertEq(utilsFacet.util_getPriorityTxMaxGasLimit(), newPriorityTxMaxGasLimit); diff --git 
a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetTransactionFilterer.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetTransactionFilterer.t.sol similarity index 85% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetTransactionFilterer.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetTransactionFilterer.t.sol index 5a8ac9a2b..8581ec6c4 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetTransactionFilterer.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetTransactionFilterer.t.sol @@ -3,6 +3,7 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; contract SetTransactionFiltererTest is AdminTest { event NewTransactionFilterer(address oldTransactionFilterer, address newTransactionFilterer); @@ -34,10 +35,11 @@ contract SetTransactionFiltererTest is AdminTest { } function test_revertWhen_notAdmin() public { + address nonAdmin = makeAddr("nonAdmin"); address transactionFilterer = makeAddr("transactionFilterer"); - vm.expectRevert("Hyperchain: not admin"); - vm.startPrank(makeAddr("nonAdmin")); + vm.startPrank(nonAdmin); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonAdmin)); adminFacet.setTransactionFilterer(transactionFilterer); } } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetValidator.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetValidator.t.sol similarity index 69% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetValidator.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetValidator.t.sol 
index 3452ed132..5b75a0ac7 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/SetValidator.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/SetValidator.t.sol @@ -3,24 +3,23 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; contract SetValidatorTest is AdminTest { event ValidatorStatusUpdate(address indexed validatorAddress, bool isActive); - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); + function test_revertWhen_calledByNonChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); address validator = makeAddr("validator"); bool isActive = true; - vm.expectRevert(ERROR_ONLY_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonStateTransitionManager); + vm.startPrank(nonChainTypeManager); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); adminFacet.setValidator(validator, isActive); } function test_deactivateValidator() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = utilsFacet.util_getChainTypeManager(); address validator = makeAddr("validator"); bool isActive = false; @@ -30,14 +29,14 @@ contract SetValidatorTest is AdminTest { vm.expectEmit(true, true, true, true, address(adminFacet)); emit ValidatorStatusUpdate(validator, isActive); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); adminFacet.setValidator(validator, isActive); assertEq(utilsFacet.util_getValidator(validator), isActive); } function test_reactivateValidator() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); + address chainTypeManager = 
utilsFacet.util_getChainTypeManager(); address validator = makeAddr("validator"); bool isActive = true; @@ -47,7 +46,7 @@ contract SetValidatorTest is AdminTest { vm.expectEmit(true, true, true, true, address(adminFacet)); emit ValidatorStatusUpdate(validator, isActive); - vm.startPrank(stateTransitionManager); + vm.startPrank(chainTypeManager); adminFacet.setValidator(validator, isActive); assertEq(utilsFacet.util_getValidator(validator), isActive); diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/UnfreezeDiamond.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/UnfreezeDiamond.t.sol new file mode 100644 index 000000000..88af27533 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/UnfreezeDiamond.t.sol @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {AdminTest} from "./_Admin_Shared.t.sol"; +import {Unauthorized, DiamondFreezeIncorrectState, DiamondNotFrozen} from "contracts/common/L1ContractErrors.sol"; + +contract UnfreezeDiamondTest is AdminTest { + event Unfreeze(); + + function test_revertWhen_calledByNonChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); + vm.startPrank(nonChainTypeManager); + adminFacet.unfreezeDiamond(); + } + + function test_revertWhen_diamondIsNotFrozen() public { + address admin = utilsFacet.util_getChainTypeManager(); + + utilsFacet.util_setIsFrozen(false); + + vm.expectRevert(DiamondNotFrozen.selector); + + vm.startPrank(admin); + adminFacet.unfreezeDiamond(); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/UpgradeChainFromVersion.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/UpgradeChainFromVersion.t.sol similarity index 64% 
rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/UpgradeChainFromVersion.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/UpgradeChainFromVersion.t.sol index 3e2155995..50de804d5 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/UpgradeChainFromVersion.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/UpgradeChainFromVersion.t.sol @@ -3,16 +3,16 @@ pragma solidity 0.8.24; import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -import {IStateTransitionManager} from "contracts/state-transition/IStateTransitionManager.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {ProtocolIdMismatch, ProtocolIdNotGreater, InvalidProtocolVersion, ValueMismatch, Unauthorized, HashMismatch} from "contracts/common/L1ContractErrors.sol"; contract UpgradeChainFromVersionTest is AdminTest { event ExecuteUpgrade(Diamond.DiamondCutData diamondCut); - function test_revertWhen_calledByNonAdminOrStateTransitionManager() public { - address nonAdminOrStateTransitionManager = makeAddr("nonAdminOrStateTransitionManager"); + function test_revertWhen_calledByNonAdminOrChainTypeManager() public { + address nonAdminOrChainTypeManager = makeAddr("nonAdminOrChainTypeManager"); uint256 oldProtocolVersion = 1; Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ facetCuts: new Diamond.FacetCut[](0), @@ -20,15 +20,14 @@ contract UpgradeChainFromVersionTest is AdminTest { initCalldata: new bytes(0) }); - vm.expectRevert(ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonAdminOrStateTransitionManager); + vm.startPrank(nonAdminOrChainTypeManager); + 
vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonAdminOrChainTypeManager)); adminFacet.upgradeChainFromVersion(oldProtocolVersion, diamondCutData); } function test_revertWhen_cutHashMismatch() public { address admin = utilsFacet.util_getAdmin(); - address stateTransitionManager = makeAddr("stateTransitionManager"); + address chainTypeManager = makeAddr("chainTypeManager"); uint256 oldProtocolVersion = 1; Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ @@ -37,24 +36,25 @@ contract UpgradeChainFromVersionTest is AdminTest { initCalldata: new bytes(0) }); - utilsFacet.util_setStateTransitionManager(stateTransitionManager); + utilsFacet.util_setChainTypeManager(chainTypeManager); bytes32 cutHashInput = keccak256("random"); vm.mockCall( - stateTransitionManager, - abi.encodeWithSelector(IStateTransitionManager.upgradeCutHash.selector), + chainTypeManager, + abi.encodeWithSelector(IChainTypeManager.upgradeCutHash.selector), abi.encode(cutHashInput) ); - vm.expectRevert("AdminFacet: cutHash mismatch"); - vm.startPrank(admin); + vm.expectRevert( + abi.encodeWithSelector(HashMismatch.selector, cutHashInput, keccak256(abi.encode(diamondCutData))) + ); adminFacet.upgradeChainFromVersion(oldProtocolVersion, diamondCutData); } function test_revertWhen_ProtocolVersionMismatchWhenUpgrading() public { address admin = utilsFacet.util_getAdmin(); - address stateTransitionManager = makeAddr("stateTransitionManager"); + address chainTypeManager = makeAddr("chainTypeManager"); uint256 oldProtocolVersion = 1; Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ @@ -64,24 +64,23 @@ contract UpgradeChainFromVersionTest is AdminTest { }); utilsFacet.util_setProtocolVersion(oldProtocolVersion + 1); - utilsFacet.util_setStateTransitionManager(stateTransitionManager); + utilsFacet.util_setChainTypeManager(chainTypeManager); bytes32 cutHashInput = keccak256(abi.encode(diamondCutData)); vm.mockCall( - stateTransitionManager, - 
abi.encodeWithSelector(IStateTransitionManager.upgradeCutHash.selector), + chainTypeManager, + abi.encodeWithSelector(IChainTypeManager.upgradeCutHash.selector), abi.encode(cutHashInput) ); - vm.expectRevert("AdminFacet: protocolVersion mismatch in STC when upgrading"); - vm.startPrank(admin); + vm.expectRevert(abi.encodeWithSelector(ProtocolIdMismatch.selector, uint256(2), oldProtocolVersion)); adminFacet.upgradeChainFromVersion(oldProtocolVersion, diamondCutData); } function test_revertWhen_ProtocolVersionMismatchAfterUpgrading() public { address admin = utilsFacet.util_getAdmin(); - address stateTransitionManager = makeAddr("stateTransitionManager"); + address chainTypeManager = makeAddr("chainTypeManager"); uint256 oldProtocolVersion = 1; Diamond.DiamondCutData memory diamondCutData = Diamond.DiamondCutData({ @@ -91,17 +90,16 @@ contract UpgradeChainFromVersionTest is AdminTest { }); utilsFacet.util_setProtocolVersion(oldProtocolVersion); - utilsFacet.util_setStateTransitionManager(stateTransitionManager); + utilsFacet.util_setChainTypeManager(chainTypeManager); bytes32 cutHashInput = keccak256(abi.encode(diamondCutData)); vm.mockCall( - stateTransitionManager, - abi.encodeWithSelector(IStateTransitionManager.upgradeCutHash.selector), + chainTypeManager, + abi.encodeWithSelector(IChainTypeManager.upgradeCutHash.selector), abi.encode(cutHashInput) ); - vm.expectRevert("AdminFacet: protocolVersion mismatch in STC after upgrading"); - + vm.expectRevert(ProtocolIdNotGreater.selector); // solhint-disable-next-line func-named-parameters vm.expectEmit(true, true, true, true, address(adminFacet)); emit ExecuteUpgrade(diamondCutData); diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/_Admin_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/_Admin_Shared.t.sol similarity index 91% rename from 
l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/_Admin_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/_Admin_Shared.t.sol index a4419a342..7c45f8e8b 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/_Admin_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Admin/_Admin_Shared.t.sol @@ -3,8 +3,8 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils} from "foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; @@ -36,7 +36,7 @@ contract AdminTest is Test { function setUp() public virtual { Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](2); facetCuts[0] = Diamond.FacetCut({ - facet: address(new AdminFacet()), + facet: address(new AdminFacet(block.chainid)), action: Diamond.Action.Add, isFreezable: true, selectors: getAdminSelectors() diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyBridgehub.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyBridgehub.t.sol similarity index 65% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyBridgehub.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyBridgehub.t.sol index c484a38bb..459e71b47 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyBridgehub.t.sol +++ 
b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyBridgehub.t.sol @@ -2,14 +2,14 @@ pragma solidity 0.8.24; -import {ZkSyncHyperchainBaseTest, ERROR_ONLY_BRIDGEHUB} from "./_Base_Shared.t.sol"; +import {ZKChainBaseTest} from "./_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; -contract OnlyBridgehubTest is ZkSyncHyperchainBaseTest { +contract OnlyBridgehubTest is ZKChainBaseTest { function test_revertWhen_calledByNonBridgehub() public { address nonBridgehub = makeAddr("nonBridgehub"); - vm.expectRevert(ERROR_ONLY_BRIDGEHUB); - + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonBridgehub)); vm.startPrank(nonBridgehub); testBaseFacet.functionWithOnlyBridgehubModifier(); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernor.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernor.t.sol similarity index 64% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernor.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernor.t.sol index ba5199f92..478372df9 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernor.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernor.t.sol @@ -2,14 +2,14 @@ pragma solidity 0.8.24; -import {ZkSyncHyperchainBaseTest, ERROR_ONLY_ADMIN} from "./_Base_Shared.t.sol"; +import {ZKChainBaseTest} from "./_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; -contract OnlyAdminTest is ZkSyncHyperchainBaseTest { +contract OnlyAdminTest is ZKChainBaseTest { function test_revertWhen_calledByNonAdmin() public { address nonAdmin = makeAddr("nonAdmin"); - vm.expectRevert(ERROR_ONLY_ADMIN); - + 
vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonAdmin)); vm.startPrank(nonAdmin); testBaseFacet.functionWithOnlyAdminModifier(); } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernorOrStateTransitionManager.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernorOrStateTransitionManager.t.sol new file mode 100644 index 000000000..67cfe3d32 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernorOrStateTransitionManager.t.sol @@ -0,0 +1,38 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {ZKChainBaseTest} from "./_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; + +contract OnlyAdminOrChainTypeManagerTest is ZKChainBaseTest { + function test_revertWhen_calledByNonAdmin() public { + address nonAdmin = makeAddr("nonAdmin"); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonAdmin)); + vm.startPrank(nonAdmin); + testBaseFacet.functionWithOnlyAdminOrChainTypeManagerModifier(); + } + + function test_revertWhen_calledByNonChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); + vm.startPrank(nonChainTypeManager); + testBaseFacet.functionWithOnlyAdminOrChainTypeManagerModifier(); + } + + function test_successfulCallWhenCalledByAdmin() public { + address admin = utilsFacet.util_getAdmin(); + + vm.startPrank(admin); + testBaseFacet.functionWithOnlyAdminOrChainTypeManagerModifier(); + } + + function test_successfulCallWhenCalledByChainTypeManager() public { + address chainTypeManager = utilsFacet.util_getChainTypeManager(); + + vm.startPrank(chainTypeManager); + testBaseFacet.functionWithOnlyAdminOrChainTypeManagerModifier(); + } +} diff --git 
a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyStateTransitionManager.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyStateTransitionManager.t.sol new file mode 100644 index 000000000..b7f7ec5a3 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyStateTransitionManager.t.sol @@ -0,0 +1,23 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {ZKChainBaseTest} from "./_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; + +contract OnlyChainTypeManagerTest is ZKChainBaseTest { + function test_revertWhen_calledByNonChainTypeManager() public { + address nonChainTypeManager = makeAddr("nonChainTypeManager"); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonChainTypeManager)); + vm.startPrank(nonChainTypeManager); + testBaseFacet.functionWithOnlyChainTypeManagerModifier(); + } + + function test_successfulCall() public { + address chainTypeManager = utilsFacet.util_getChainTypeManager(); + + vm.startPrank(chainTypeManager); + testBaseFacet.functionWithOnlyChainTypeManagerModifier(); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyValidator.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyValidator.t.sol similarity index 69% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyValidator.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyValidator.t.sol index c834dd982..5997976ac 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyValidator.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/OnlyValidator.t.sol @@ -2,16 +2,16 @@ pragma solidity 0.8.24; -import 
{ZkSyncHyperchainBaseTest, ERROR_ONLY_VALIDATOR} from "./_Base_Shared.t.sol"; +import {ZKChainBaseTest} from "./_Base_Shared.t.sol"; +import {Unauthorized} from "contracts/common/L1ContractErrors.sol"; -contract OnlyValidatorTest is ZkSyncHyperchainBaseTest { +contract OnlyValidatorTest is ZKChainBaseTest { function test_revertWhen_calledByNonValidator() public { address nonValidator = makeAddr("nonValidator"); utilsFacet.util_setValidator(nonValidator, false); - vm.expectRevert(ERROR_ONLY_VALIDATOR); - + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, nonValidator)); vm.startPrank(nonValidator); testBaseFacet.functionWithOnlyValidatorModifier(); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/_Base_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/_Base_Shared.t.sol similarity index 59% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/_Base_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/_Base_Shared.t.sol index 15fa32883..be93c91df 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/_Base_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Base/_Base_Shared.t.sol @@ -3,41 +3,38 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils} from "foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -import {ZkSyncHyperchainBase} from "contracts/state-transition/chain-deps/facets/Admin.sol"; +import {ZKChainBase} from 
"contracts/state-transition/chain-deps/facets/Admin.sol"; import {TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; -contract TestBaseFacet is ZkSyncHyperchainBase { +contract TestBaseFacet is ZKChainBase { function functionWithOnlyAdminModifier() external onlyAdmin {} function functionWithOnlyValidatorModifier() external onlyValidator {} - function functionWithOnlyStateTransitionManagerModifier() external onlyStateTransitionManager {} + function functionWithOnlyChainTypeManagerModifier() external onlyChainTypeManager {} function functionWithOnlyBridgehubModifier() external onlyBridgehub {} - function functionWithOnlyAdminOrStateTransitionManagerModifier() external onlyAdminOrStateTransitionManager {} + function functionWithOnlyAdminOrChainTypeManagerModifier() external onlyAdminOrChainTypeManager {} - function functionWithonlyValidatorOrStateTransitionManagerModifier() - external - onlyValidatorOrStateTransitionManager - {} + function functionWithonlyValidatorOrChainTypeManagerModifier() external onlyValidatorOrChainTypeManager {} // add this to be excluded from coverage report function test() internal virtual {} } -bytes constant ERROR_ONLY_ADMIN = "Hyperchain: not admin"; -bytes constant ERROR_ONLY_VALIDATOR = "Hyperchain: not validator"; -bytes constant ERROR_ONLY_STATE_TRANSITION_MANAGER = "Hyperchain: not state transition manager"; -bytes constant ERROR_ONLY_BRIDGEHUB = "Hyperchain: not bridgehub"; -bytes constant ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER = "Hyperchain: Only by admin or state transition manager"; -bytes constant ERROR_ONLY_VALIDATOR_OR_STATE_TRANSITION_MANAGER = "Hyperchain: Only by validator or state transition manager"; +bytes constant ERROR_ONLY_ADMIN = "ZKChain: not admin"; +bytes constant ERROR_ONLY_VALIDATOR = "ZKChain: not validator"; +bytes constant ERROR_ONLY_STATE_TRANSITION_MANAGER = "ZKChain: not state transition manager"; +bytes constant ERROR_ONLY_BRIDGEHUB = "ZKChain: not bridgehub"; +bytes constant 
ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER = "ZKChain: Only by admin or state transition manager"; +bytes constant ERROR_ONLY_VALIDATOR_OR_STATE_TRANSITION_MANAGER = "ZKChain: Only by validator or state transition manager"; -contract ZkSyncHyperchainBaseTest is Test { +contract ZKChainBaseTest is Test { TestBaseFacet internal testBaseFacet; UtilsFacet internal utilsFacet; address internal testnetVerifier = address(new TestnetVerifier()); @@ -46,10 +43,10 @@ contract ZkSyncHyperchainBaseTest is Test { selectors = new bytes4[](6); selectors[0] = TestBaseFacet.functionWithOnlyAdminModifier.selector; selectors[1] = TestBaseFacet.functionWithOnlyValidatorModifier.selector; - selectors[2] = TestBaseFacet.functionWithOnlyStateTransitionManagerModifier.selector; + selectors[2] = TestBaseFacet.functionWithOnlyChainTypeManagerModifier.selector; selectors[3] = TestBaseFacet.functionWithOnlyBridgehubModifier.selector; - selectors[4] = TestBaseFacet.functionWithOnlyAdminOrStateTransitionManagerModifier.selector; - selectors[5] = TestBaseFacet.functionWithonlyValidatorOrStateTransitionManagerModifier.selector; + selectors[4] = TestBaseFacet.functionWithOnlyAdminOrChainTypeManagerModifier.selector; + selectors[5] = TestBaseFacet.functionWithonlyValidatorOrChainTypeManagerModifier.selector; } function setUp() public virtual { diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddress.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddress.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddress.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddress.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddresses.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddresses.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddresses.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/FacetAddresses.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/FacetFunctionSelectors.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/FacetFunctionSelectors.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/FacetFunctionSelectors.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/FacetFunctionSelectors.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/Facets.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/Facets.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/Facets.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/Facets.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetAdmin.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetAdmin.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetAdmin.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetAdmin.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseToken.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseToken.t.sol similarity index 70% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseToken.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseToken.t.sol index 7feed3cd1..ce0611c96 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseToken.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseToken.t.sol @@ -6,10 +6,10 @@ import {GettersFacetTest} from "./_Getters_Shared.t.sol"; contract GetBaseTokenTest is GettersFacetTest { function test() public { - address expected = makeAddr("baseToken"); + bytes32 expected = bytes32(uint256(uint160(makeAddr("baseToken")))); gettersFacetWrapper.util_setBaseToken(expected); - address received = gettersFacet.getBaseToken(); + bytes32 received = gettersFacet.getBaseTokenAssetId(); assertEq(expected, received, "BaseToken address is incorrect"); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBridgehub.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetBridgehub.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBridgehub.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetBridgehub.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetFirstUnprocessedPriorityTx.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetFirstUnprocessedPriorityTx.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetFirstUnprocessedPriorityTx.t.sol rename to 
l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetFirstUnprocessedPriorityTx.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2BootloaderBytecodeHash.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2BootloaderBytecodeHash.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2BootloaderBytecodeHash.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2BootloaderBytecodeHash.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2DefaultAccountBytecodeHash.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2DefaultAccountBytecodeHash.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2DefaultAccountBytecodeHash.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2DefaultAccountBytecodeHash.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBatchNumber.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBatchNumber.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBatchNumber.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBatchNumber.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBlockNumber.t.sol 
b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBlockNumber.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBlockNumber.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeBlockNumber.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeTxHash.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeTxHash.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeTxHash.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetL2SystemContractsUpgradeTxHash.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetPendingAdmin.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetPendingAdmin.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetPendingAdmin.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetPendingAdmin.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityQueueSize.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityQueueSize.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityQueueSize.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityQueueSize.t.sol diff --git 
a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityTxMaxGasLimit.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityTxMaxGasLimit.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityTxMaxGasLimit.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetPriorityTxMaxGasLimit.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetProtocolVersion.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetProtocolVersion.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetProtocolVersion.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetProtocolVersion.t.sol diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetStateTransitionManager.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetStateTransitionManager.t.sol new file mode 100644 index 000000000..cf8b23ef0 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetStateTransitionManager.t.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {GettersFacetTest} from "./_Getters_Shared.t.sol"; + +contract GetChainTypeManagerTest is GettersFacetTest { + function test() public { + address expected = makeAddr("chainTypeManager"); + gettersFacetWrapper.util_setChainTypeManager(expected); + + address received = gettersFacet.getChainTypeManager(); + + assertEq(expected, received, "ChainTypeManager address is incorrect"); + } +} diff --git 
a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesCommitted.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesCommitted.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesCommitted.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesCommitted.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesExecuted.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesExecuted.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesExecuted.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesExecuted.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesVerified.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesVerified.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesVerified.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBatchesVerified.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksCommitted.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksCommitted.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksCommitted.t.sol rename to 
l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksCommitted.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksExecuted.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksExecuted.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksExecuted.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksExecuted.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksVerified.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksVerified.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksVerified.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalBlocksVerified.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalPriorityTxs.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalPriorityTxs.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalPriorityTxs.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetTotalPriorityTxs.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifier.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifier.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifier.t.sol 
rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifier.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifierParams.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifierParams.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifierParams.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/GetVerifierParams.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsDiamondStorageFrozen.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsDiamondStorageFrozen.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsDiamondStorageFrozen.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsDiamondStorageFrozen.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsEthWithdrawalFinalized.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsEthWithdrawalFinalized.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsEthWithdrawalFinalized.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsEthWithdrawalFinalized.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsFacetFreezable.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsFacetFreezable.t.sol similarity index 100% rename from 
l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsFacetFreezable.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsFacetFreezable.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsFunctionFreezable.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsFunctionFreezable.t.sol similarity index 85% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsFunctionFreezable.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsFunctionFreezable.t.sol index 0b257db68..4af9875e2 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsFunctionFreezable.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsFunctionFreezable.t.sol @@ -3,6 +3,7 @@ pragma solidity 0.8.24; import {GettersFacetTest} from "./_Getters_Shared.t.sol"; +import {InvalidSelector} from "contracts/common/L1ContractErrors.sol"; contract IsFunctionFreezableTest is GettersFacetTest { function test_revertWhen_facetAddressIzZero() public { @@ -11,8 +12,7 @@ contract IsFunctionFreezableTest is GettersFacetTest { gettersFacetWrapper.util_setFacetAddress(selector, address(0)); - vm.expectRevert(bytes.concat("g2")); - + vm.expectRevert(abi.encodeWithSelector(InvalidSelector.selector, selector)); gettersFacet.isFunctionFreezable(selector); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsValidator.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsValidator.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/IsValidator.t.sol rename to 
l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/IsValidator.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/L2LogsRootHash.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/L2LogsRootHash.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/L2LogsRootHash.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/L2LogsRootHash.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBatchHash.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBatchHash.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBatchHash.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBatchHash.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBlockHash.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBlockHash.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBlockHash.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/StoredBlockHash.t.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/_Getters_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/_Getters_Shared.t.sol similarity index 88% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/_Getters_Shared.t.sol rename to 
l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/_Getters_Shared.t.sol index 1d64711fe..9f66926e7 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/_Getters_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Getters/_Getters_Shared.t.sol @@ -28,16 +28,12 @@ contract GettersFacetWrapper is GettersFacet { s.bridgehub = _bridgehub; } - function util_setStateTransitionManager(address _stateTransitionManager) external { - s.stateTransitionManager = _stateTransitionManager; + function util_setChainTypeManager(address _chainTypeManager) external { + s.chainTypeManager = _chainTypeManager; } - function util_setBaseToken(address _baseToken) external { - s.baseToken = _baseToken; - } - - function util_setBaseTokenBridge(address _baseTokenBridge) external { - s.baseTokenBridge = _baseTokenBridge; + function util_setBaseToken(bytes32 _baseTokenAssetId) external { + s.baseTokenAssetId = _baseTokenAssetId; } function util_setTotalBatchesCommitted(uint256 _totalBatchesCommitted) external { @@ -53,21 +49,18 @@ contract GettersFacetWrapper is GettersFacet { } function util_setTotalPriorityTxs(uint256 _totalPriorityTxs) external { - s.priorityQueue.tail = _totalPriorityTxs; + s.priorityTree.startIndex = 0; + s.priorityTree.tree._nextLeafIndex = _totalPriorityTxs; } function util_setFirstUnprocessedPriorityTx(uint256 _firstUnprocessedPriorityTx) external { - s.priorityQueue.head = _firstUnprocessedPriorityTx; + s.priorityTree.startIndex = 0; + s.priorityTree.unprocessedIndex = _firstUnprocessedPriorityTx; } function util_setPriorityQueueSize(uint256 _priorityQueueSize) external { - s.priorityQueue.head = 0; - s.priorityQueue.tail = _priorityQueueSize; - } - - function util_setPriorityQueueFrontOperation(PriorityOperation memory _priorityQueueFrontOperation) external { - s.priorityQueue.data[s.priorityQueue.head] = _priorityQueueFrontOperation; - 
s.priorityQueue.tail = s.priorityQueue.head + 1; + s.priorityTree.unprocessedIndex = 1; + s.priorityTree.tree._nextLeafIndex = _priorityQueueSize + 1; } function util_setValidator(address _validator, bool _status) external { diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/BaseMailboxTests.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/BaseMailboxTests.t.sol new file mode 100644 index 000000000..230828ae7 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/BaseMailboxTests.t.sol @@ -0,0 +1,82 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {MailboxTest} from "./_Mailbox_Shared.t.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "contracts/common/Config.sol"; +import {DummyZKChain} from "contracts/dev-contracts/test/DummyZKChain.sol"; +import {BaseTokenGasPriceDenominatorNotSet} from "contracts/common/L1ContractErrors.sol"; + +contract MailboxBaseTests is MailboxTest { + function setUp() public virtual { + setupDiamondProxy(); + utilsFacet.util_setBaseTokenGasPriceMultiplierDenominator(1); + utilsFacet.util_setBaseTokenGasPriceMultiplierNominator(1); + } + + function test_mailboxConstructor() public { + DummyZKChain h = new DummyZKChain(address(0), eraChainId, block.chainid); + assertEq(h.getEraChainId(), eraChainId); + } + + function test_RevertWhen_badDenominatorInL2TransactionBaseCost() public { + utilsFacet.util_setBaseTokenGasPriceMultiplierDenominator(0); + vm.expectRevert(BaseTokenGasPriceDenominatorNotSet.selector); + mailboxFacet.l2TransactionBaseCost(100, 10000, REQUIRED_L2_GAS_PRICE_PER_PUBDATA); + } + + function test_successful_getL2TransactionBaseCostPricingModeValidium() public { + uint256 gasPrice = 10000000; + uint256 l2GasLimit = 1000000; + uint256 
l2GasPerPubdataByteLimit = REQUIRED_L2_GAS_PRICE_PER_PUBDATA; + + FeeParams memory feeParams = FeeParams({ + pubdataPricingMode: PubdataPricingMode.Validium, + batchOverheadL1Gas: 1000000, + maxPubdataPerBatch: 120000, + maxL2GasPerBatch: 80000000, + priorityTxMaxPubdata: 99000, + minimalL2GasPrice: 250000000 + }); + + utilsFacet.util_setFeeParams(feeParams); + + // this was get from running the function, but more reasonable would be to + // have some invariants that the calculation should keep for min required gas + // price and also gas limit + uint256 l2TransactionBaseCost = 250125000000000; + + assertEq( + mailboxFacet.l2TransactionBaseCost(gasPrice, l2GasLimit, l2GasPerPubdataByteLimit), + l2TransactionBaseCost + ); + } + + function test_successful_getL2TransactionBaseCostPricingModeRollup() public { + uint256 gasPrice = 10000000; + uint256 l2GasLimit = 1000000; + uint256 l2GasPerPubdataByteLimit = REQUIRED_L2_GAS_PRICE_PER_PUBDATA; + + FeeParams memory feeParams = FeeParams({ + pubdataPricingMode: PubdataPricingMode.Rollup, + batchOverheadL1Gas: 1000000, + maxPubdataPerBatch: 120000, + maxL2GasPerBatch: 80000000, + priorityTxMaxPubdata: 99000, + minimalL2GasPrice: 250000000 + }); + + utilsFacet.util_setFeeParams(feeParams); + + // this was get from running the function, but more reasonable would be to + // have some invariants that the calculation should keep for min required gas + // price and also gas limit + uint256 l2TransactionBaseCost = 250125000000000; + + assertEq( + mailboxFacet.l2TransactionBaseCost(gasPrice, l2GasLimit, l2GasPerPubdataByteLimit), + l2TransactionBaseCost + ); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Mailbox/BridgehubRequestL2Transaction.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/BridgehubRequestL2Transaction.t.sol similarity index 61% rename from 
l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Mailbox/BridgehubRequestL2Transaction.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/BridgehubRequestL2Transaction.t.sol index d34a6bd7d..f435ecfbd 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Mailbox/BridgehubRequestL2Transaction.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/BridgehubRequestL2Transaction.t.sol @@ -4,12 +4,17 @@ pragma solidity 0.8.24; import {MailboxTest} from "./_Mailbox_Shared.t.sol"; import {BridgehubL2TransactionRequest} from "contracts/common/Messaging.sol"; -import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA} from "contracts/common/Config.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, MAX_NEW_FACTORY_DEPS} from "contracts/common/Config.sol"; import {TransactionFiltererTrue} from "contracts/dev-contracts/test/DummyTransactionFiltererTrue.sol"; import {TransactionFiltererFalse} from "contracts/dev-contracts/test/DummyTransactionFiltererFalse.sol"; +import {TransactionNotAllowed, Unauthorized} from "contracts/common/L1ContractErrors.sol"; -contract BridgehubRequestL2TransactionTest is MailboxTest { - function test_successWithoutFilterer() public { +contract MailboxBridgehubRequestL2TransactionTest is MailboxTest { + function setUp() public virtual { + setupDiamondProxy(); + } + + function test_success_withoutFilterer() public { address bridgehub = makeAddr("bridgehub"); utilsFacet.util_setBridgehub(bridgehub); @@ -24,7 +29,7 @@ contract BridgehubRequestL2TransactionTest is MailboxTest { assertTrue(canonicalTxHash != bytes32(0), "canonicalTxHash should not be 0"); } - function test_successWithFilterer() public { + function test_success_withFilterer() public { address bridgehub = makeAddr("bridgehub"); TransactionFiltererTrue tf = new TransactionFiltererTrue(); @@ -54,7 +59,17 @@ contract BridgehubRequestL2TransactionTest is 
MailboxTest { vm.deal(bridgehub, 100 ether); vm.prank(address(bridgehub)); - vm.expectRevert(bytes("tf")); + vm.expectRevert(TransactionNotAllowed.selector); + mailboxFacet.bridgehubRequestL2Transaction(req); + } + + function test_revertWhen_notBridgehub() public { + address bridgehub = makeAddr("bridgehub"); + utilsFacet.util_setBridgehub(bridgehub); + BridgehubL2TransactionRequest memory req = getBridgehubRequestL2TransactionRequest(); + vm.deal(bridgehub, 100 ether); + vm.prank(address(sender)); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, sender)); mailboxFacet.bridgehubRequestL2Transaction(req); } @@ -74,4 +89,25 @@ contract BridgehubRequestL2TransactionTest is MailboxTest { refundRecipient: sender }); } + + function test_priorityTreeRootChange() public { + bytes32 oldRootHash = gettersFacet.getPriorityTreeRoot(); + assertEq(oldRootHash, bytes32(0), "root hash should be 0"); + + address bridgehub = makeAddr("bridgehub"); + + utilsFacet.util_setBridgehub(bridgehub); + utilsFacet.util_setBaseTokenGasPriceMultiplierDenominator(1); + utilsFacet.util_setPriorityTxMaxGasLimit(100000000); + + BridgehubL2TransactionRequest memory req = getBridgehubRequestL2TransactionRequest(); + + vm.deal(bridgehub, 100 ether); + vm.prank(address(bridgehub)); + bytes32 canonicalTxHash = mailboxFacet.bridgehubRequestL2Transaction(req); + assertTrue(canonicalTxHash != bytes32(0), "canonicalTxHash should not be 0"); + + bytes32 newRootHash = gettersFacet.getPriorityTreeRoot(); + assertEq(canonicalTxHash, newRootHash, "root hash should have changed"); + } } diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/FinalizeWithdrawal.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/FinalizeWithdrawal.t.sol new file mode 100644 index 000000000..5e7fa27f6 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/FinalizeWithdrawal.t.sol @@ 
-0,0 +1,65 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {MailboxTest} from "./_Mailbox_Shared.t.sol"; +import {DummyBridgehub} from "contracts/dev-contracts/test/DummyBridgehub.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {DummySharedBridge} from "contracts/dev-contracts/test/DummySharedBridge.sol"; +import {OnlyEraSupported} from "contracts/common/L1ContractErrors.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; + +contract MailboxFinalizeWithdrawal is MailboxTest { + bytes32[] proof; + bytes message; + DummySharedBridge L1AssetRouter; + address baseTokenBridgeAddress; + + function setUp() public virtual { + setupDiamondProxy(); + + L1AssetRouter = new DummySharedBridge(keccak256("dummyDepositHash")); + baseTokenBridgeAddress = address(L1AssetRouter); + + vm.mockCall(bridgehub, abi.encodeCall(Bridgehub.sharedBridge, ()), abi.encode(baseTokenBridgeAddress)); + + proof = new bytes32[](0); + message = "message"; + } + + function test_RevertWhen_notEra() public { + utilsFacet.util_setChainId(eraChainId + 1); + + vm.expectRevert(OnlyEraSupported.selector); + mailboxFacet.finalizeEthWithdrawal({ + _l2BatchNumber: 0, + _l2MessageIndex: 0, + _l2TxNumberInBatch: 0, + _message: message, + _merkleProof: proof + }); + } + + function test_success_withdrawal(uint256 amount) public { + utilsFacet.util_setChainId(eraChainId); + + address l1Receiver = makeAddr("receiver"); + address l1Token = address(1); + vm.deal(baseTokenBridgeAddress, amount); + + bytes memory message = abi.encode(l1Receiver, l1Token, amount); + + mailboxFacet.finalizeEthWithdrawal({ + _l2BatchNumber: 0, + _l2MessageIndex: 0, + _l2TxNumberInBatch: 0, + _message: message, + _merkleProof: proof + }); + + assertEq(l1Receiver.balance, amount); + 
assertEq(baseTokenBridgeAddress.balance, 0); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/ProvingL2LogsInclusion.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/ProvingL2LogsInclusion.t.sol new file mode 100644 index 000000000..e04335d04 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/ProvingL2LogsInclusion.t.sol @@ -0,0 +1,391 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {MailboxTest} from "./_Mailbox_Shared.t.sol"; +import {L2Message, L2Log} from "contracts/common/Messaging.sol"; +import "forge-std/Test.sol"; +import {L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, L1_GAS_PER_PUBDATA_BYTE, L2_TO_L1_LOG_SERIALIZE_SIZE} from "contracts/common/Config.sol"; +import {L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, L2_BOOTLOADER_ADDRESS} from "contracts/common/L2ContractAddresses.sol"; +import {Merkle} from "contracts/common/libraries/Merkle.sol"; +import {BatchNotExecuted, HashedLogIsDefault} from "contracts/common/L1ContractErrors.sol"; +import {MurkyBase} from "murky/common/MurkyBase.sol"; +import {MerkleTest} from "contracts/dev-contracts/test/MerkleTest.sol"; +import {TxStatus} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {MerkleTreeNoSort} from "test/foundry/l1/unit/concrete/common/libraries/Merkle/MerkleTreeNoSort.sol"; + +contract MailboxL2LogsProve is MailboxTest { + bytes32[] elements; + MerkleTest merkle; + MerkleTreeNoSort merkleTree; + bytes data; + uint256 batchNumber; + bool isService; + uint8 shardId; + + function setUp() public virtual { + setupDiamondProxy(); + + data = abi.encodePacked("test data"); + merkleTree = new MerkleTreeNoSort(); + merkle = new MerkleTest(); + batchNumber = gettersFacet.getTotalBatchesExecuted(); + isService = true; + shardId = 0; + } + + function 
_addHashedLogToMerkleTree( + uint8 _shardId, + bool _isService, + uint16 _txNumberInBatch, + address _sender, + bytes32 _key, + bytes32 _value + ) internal returns (uint256 index) { + elements.push(keccak256(abi.encodePacked(_shardId, _isService, _txNumberInBatch, _sender, _key, _value))); + + index = elements.length - 1; + } + + function test_RevertWhen_batchNumberGreaterThanBatchesExecuted() public { + L2Message memory message = L2Message({txNumberInBatch: 0, sender: sender, data: data}); + bytes32[] memory proof = _appendProofMetadata(new bytes32[](1)); + + _proveL2MessageInclusion({ + _batchNumber: batchNumber + 1, + _index: 0, + _message: message, + _proof: proof, + _expectedError: abi.encodeWithSelector(BatchNotExecuted.selector, batchNumber + 1) + }); + } + + function test_success_proveL2MessageInclusion() public { + uint256 firstLogIndex = _addHashedLogToMerkleTree({ + _shardId: 0, + _isService: true, + _txNumberInBatch: 0, + _sender: L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, + _key: bytes32(uint256(uint160(sender))), + _value: keccak256(data) + }); + + uint256 secondLogIndex = _addHashedLogToMerkleTree({ + _shardId: 0, + _isService: true, + _txNumberInBatch: 1, + _sender: L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, + _key: bytes32(uint256(uint160(sender))), + _value: keccak256(data) + }); + + // Calculate the Merkle root + bytes32 root = merkleTree.getRoot(elements); + utilsFacet.util_setL2LogsRootHash(batchNumber, root); + + // Create L2 message + L2Message memory message = L2Message({txNumberInBatch: 0, sender: sender, data: data}); + + // Get Merkle proof for the first element + bytes32[] memory firstLogProof = merkleTree.getProof(elements, firstLogIndex); + + { + // Calculate the root using the Merkle proof + bytes32 leaf = elements[firstLogIndex]; + bytes32 calculatedRoot = merkle.calculateRoot(firstLogProof, firstLogIndex, leaf); + + // Assert that the calculated root matches the expected root + assertEq(calculatedRoot, root); + } + + // Prove L2 message 
inclusion + bool ret = _proveL2MessageInclusion(batchNumber, firstLogIndex, message, firstLogProof, bytes("")); + + // Assert that the proof was successful + assertEq(ret, true); + + // Prove L2 message inclusion for wrong leaf + ret = _proveL2MessageInclusion(batchNumber, secondLogIndex, message, firstLogProof, bytes("")); + + // Assert that the proof has failed + assertEq(ret, false); + } + + function test_success_proveL2LogInclusion() public { + uint256 firstLogIndex = _addHashedLogToMerkleTree({ + _shardId: shardId, + _isService: isService, + _txNumberInBatch: 0, + _sender: L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, + _key: bytes32(uint256(uint160(sender))), + _value: keccak256(data) + }); + + uint256 secondLogIndex = _addHashedLogToMerkleTree({ + _shardId: shardId, + _isService: isService, + _txNumberInBatch: 1, + _sender: L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, + _key: bytes32(uint256(uint160(sender))), + _value: keccak256(data) + }); + + L2Log memory log = L2Log({ + l2ShardId: shardId, + isService: isService, + txNumberInBatch: 1, + sender: L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, + key: bytes32(uint256(uint160(sender))), + value: keccak256(data) + }); + + // Calculate the Merkle root + bytes32 root = merkleTree.getRoot(elements); + // Set root hash for current batch + utilsFacet.util_setL2LogsRootHash(batchNumber, root); + + // Get Merkle proof for the first element + bytes32[] memory secondLogProof = merkleTree.getProof(elements, secondLogIndex); + + { + // Calculate the root using the Merkle proof + bytes32 leaf = elements[secondLogIndex]; + + bytes32 calculatedRoot = merkle.calculateRoot(secondLogProof, secondLogIndex, leaf); + // Assert that the calculated root matches the expected root + assertEq(calculatedRoot, root); + } + + // Prove l2 log inclusion with correct proof + bool ret = _proveL2LogInclusion({ + _batchNumber: batchNumber, + _index: secondLogIndex, + _proof: secondLogProof, + _log: log, + _expectedError: bytes("") + }); + + // Assert that 
the proof was successful + assertEq(ret, true); + + // Prove l2 log inclusion with wrong proof + ret = _proveL2LogInclusion({ + _batchNumber: batchNumber, + _index: firstLogIndex, + _proof: secondLogProof, + _log: log, + _expectedError: bytes("") + }); + + // Assert that the proof was successful + assertEq(ret, false); + } + + // this is not possible in case of message, because some default values + // are set during translation from message to log + function test_RevertWhen_proveL2LogInclusionDefaultLog() public { + L2Log memory log = L2Log({ + l2ShardId: 0, + isService: false, + txNumberInBatch: 0, + sender: address(0), + key: bytes32(0), + value: bytes32(0) + }); + + uint256 firstLogIndex = _addHashedLogToMerkleTree({ + _shardId: 0, + _isService: true, + _txNumberInBatch: 1, + _sender: L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, + _key: bytes32(uint256(uint160(sender))), + _value: keccak256(data) + }); + + // Add first element to the Merkle tree + elements.push(keccak256(new bytes(L2_TO_L1_LOG_SERIALIZE_SIZE))); + uint256 secondLogIndex = 1; + + // Calculate the Merkle root + bytes32 root = merkleTree.getRoot(elements); + // Set root hash for current batch + utilsFacet.util_setL2LogsRootHash(batchNumber, root); + + // Get Merkle proof for the first element + bytes32[] memory secondLogProof = merkleTree.getProof(elements, secondLogIndex); + + { + // Calculate the root using the Merkle proof + bytes32 leaf = elements[secondLogIndex]; + bytes32 calculatedRoot = merkle.calculateRoot(secondLogProof, secondLogIndex, leaf); + // Assert that the calculated root matches the expected root + assertEq(calculatedRoot, root); + } + + // Prove log inclusion reverts + _proveL2LogInclusion( + batchNumber, + secondLogIndex, + log, + secondLogProof, + bytes.concat(HashedLogIsDefault.selector) + ); + } + + function test_success_proveL1ToL2TransactionStatus() public { + bytes32 firstL2TxHash = keccak256("firstL2Transaction"); + bytes32 secondL2TxHash = keccak256("SecondL2Transaction"); 
+ TxStatus txStatus = TxStatus.Success; + + uint256 firstLogIndex = _addHashedLogToMerkleTree({ + _shardId: shardId, + _isService: isService, + _txNumberInBatch: 0, + _sender: L2_BOOTLOADER_ADDRESS, + _key: firstL2TxHash, + _value: bytes32(uint256(txStatus)) + }); + + uint256 secondLogIndex = _addHashedLogToMerkleTree({ + _shardId: shardId, + _isService: isService, + _txNumberInBatch: 1, + _sender: L2_BOOTLOADER_ADDRESS, + _key: secondL2TxHash, + _value: bytes32(uint256(txStatus)) + }); + + // Calculate the Merkle root + bytes32 root = merkleTree.getRoot(elements); + // Set root hash for current batch + utilsFacet.util_setL2LogsRootHash(batchNumber, root); + + // Get Merkle proof for the first element + bytes32[] memory secondLogProof = merkleTree.getProof(elements, secondLogIndex); + + { + // Calculate the root using the Merkle proof + bytes32 leaf = elements[secondLogIndex]; + bytes32 calculatedRoot = merkle.calculateRoot(secondLogProof, secondLogIndex, leaf); + // Assert that the calculated root matches the expected root + assertEq(calculatedRoot, root); + } + + // Prove L1 to L2 transaction status + bool ret = _proveL1ToL2TransactionStatus({ + _l2TxHash: secondL2TxHash, + _l2BatchNumber: batchNumber, + _l2MessageIndex: secondLogIndex, + _l2TxNumberInBatch: 1, + _merkleProof: secondLogProof, + _status: txStatus + }); + // Assert that the proof was successful + assertEq(ret, true); + } + + /// @notice Proves L1 to L2 transaction status and cross-checks new and old encoding + function _proveL1ToL2TransactionStatus( + bytes32 _l2TxHash, + uint256 _l2BatchNumber, + uint256 _l2MessageIndex, + uint16 _l2TxNumberInBatch, + bytes32[] memory _merkleProof, + TxStatus _status + ) internal returns (bool) { + bool retOldEncoding = mailboxFacet.proveL1ToL2TransactionStatus({ + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _merkleProof, + _status: _status + }); + bool 
retNewEncoding = mailboxFacet.proveL1ToL2TransactionStatus({ + _l2TxHash: _l2TxHash, + _l2BatchNumber: _l2BatchNumber, + _l2MessageIndex: _l2MessageIndex, + _l2TxNumberInBatch: _l2TxNumberInBatch, + _merkleProof: _appendProofMetadata(_merkleProof), + _status: _status + }); + + assertEq(retOldEncoding, retNewEncoding); + + return retOldEncoding; + } + + /// @notice Proves L2 log inclusion and cross-checks new and old encoding + function _proveL2LogInclusion( + uint256 _batchNumber, + uint256 _index, + L2Log memory _log, + bytes32[] memory _proof, + bytes memory _expectedError + ) internal returns (bool) { + if (_expectedError.length > 0) { + vm.expectRevert(_expectedError); + } + bool retOldEncoding = mailboxFacet.proveL2LogInclusion({ + _batchNumber: _batchNumber, + _index: _index, + _proof: _proof, + _log: _log + }); + + if (_expectedError.length > 0) { + vm.expectRevert(_expectedError); + } + bool retNewEncoding = mailboxFacet.proveL2LogInclusion({ + _batchNumber: _batchNumber, + _index: _index, + _proof: _appendProofMetadata(_proof), + _log: _log + }); + + assertEq(retOldEncoding, retNewEncoding); + return retOldEncoding; + } + + function _proveL2MessageInclusion( + uint256 _batchNumber, + uint256 _index, + L2Message memory _message, + bytes32[] memory _proof, + bytes memory _expectedError + ) internal returns (bool) { + if (_expectedError.length > 0) { + vm.expectRevert(_expectedError); + } + bool retOldEncoding = mailboxFacet.proveL2MessageInclusion({ + _batchNumber: _batchNumber, + _index: _index, + _message: _message, + _proof: _proof + }); + + if (_expectedError.length > 0) { + vm.expectRevert(_expectedError); + } + bool retNewEncoding = mailboxFacet.proveL2MessageInclusion({ + _batchNumber: _batchNumber, + _index: _index, + _message: _message, + _proof: _appendProofMetadata(_proof) + }); + + assertEq(retOldEncoding, retNewEncoding); + return retOldEncoding; + } + + /// @notice Appends the proof metadata to the log proof as if the proof is for a batch that 
settled on L1. + function _appendProofMetadata(bytes32[] memory logProof) internal returns (bytes32[] memory result) { + result = new bytes32[](logProof.length + 1); + + result[0] = bytes32(bytes.concat(bytes1(0x01), bytes1(uint8(logProof.length)), bytes30(0x00))); + for (uint256 i = 0; i < logProof.length; i++) { + result[i + 1] = logProof[i]; + } + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/RequestL2Transaction.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/RequestL2Transaction.t.sol new file mode 100644 index 000000000..0ea16b46c --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/RequestL2Transaction.t.sol @@ -0,0 +1,154 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {MailboxTest} from "./_Mailbox_Shared.t.sol"; +import {BridgehubL2TransactionRequest} from "contracts/common/Messaging.sol"; +import {REQUIRED_L2_GAS_PRICE_PER_PUBDATA, MAX_NEW_FACTORY_DEPS, ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; +import {TransactionFiltererTrue} from "contracts/dev-contracts/test/DummyTransactionFiltererTrue.sol"; +import {TransactionFiltererFalse} from "contracts/dev-contracts/test/DummyTransactionFiltererFalse.sol"; +import {FeeParams, PubdataPricingMode} from "contracts/state-transition/chain-deps/ZKChainStorage.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {DummySharedBridge} from "contracts/dev-contracts/test/DummySharedBridge.sol"; +import {OnlyEraSupported, TooManyFactoryDeps, MsgValueTooLow, GasPerPubdataMismatch} from "contracts/common/L1ContractErrors.sol"; +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; + +contract MailboxRequestL2TransactionTest is MailboxTest { + address tempAddress; + bytes[] tempBytesArr; + bytes tempBytes; + DummySharedBridge l1SharedBridge; + address baseTokenBridgeAddress; + + 
function setUp() public virtual { + setupDiamondProxy(); + + l1SharedBridge = new DummySharedBridge(keccak256("dummyDepositHash")); + baseTokenBridgeAddress = address(l1SharedBridge); + vm.mockCall(bridgehub, abi.encodeCall(Bridgehub.sharedBridge, ()), abi.encode(baseTokenBridgeAddress)); + + tempAddress = makeAddr("temp"); + tempBytesArr = new bytes[](0); + tempBytes = ""; + utilsFacet.util_setChainId(eraChainId); + } + + function test_RevertWhen_NotEra(uint256 randomChainId) public { + vm.assume(eraChainId != randomChainId); + + utilsFacet.util_setChainId(randomChainId); + + vm.expectRevert(OnlyEraSupported.selector); + mailboxFacet.requestL2Transaction({ + _contractL2: tempAddress, + _l2Value: 0, + _calldata: tempBytes, + _l2GasLimit: 0, + _l2GasPerPubdataByteLimit: 0, + _factoryDeps: tempBytesArr, + _refundRecipient: tempAddress + }); + } + + function test_RevertWhen_wrongL2GasPerPubdataByteLimit() public { + vm.expectRevert(GasPerPubdataMismatch.selector); + mailboxFacet.requestL2Transaction({ + _contractL2: tempAddress, + _l2Value: 0, + _calldata: tempBytes, + _l2GasLimit: 0, + _l2GasPerPubdataByteLimit: 0, + _factoryDeps: tempBytesArr, + _refundRecipient: tempAddress + }); + } + + function test_RevertWhen_msgValueDoesntCoverTx() public { + utilsFacet.util_setBaseTokenGasPriceMultiplierDenominator(1); + tempBytesArr = new bytes[](1); + + uint256 baseCost = mailboxFacet.l2TransactionBaseCost(10000000, 1000000, REQUIRED_L2_GAS_PRICE_PER_PUBDATA); + uint256 l2Value = 1 ether; + uint256 mintValue = baseCost + l2Value; + + vm.expectRevert(abi.encodeWithSelector(MsgValueTooLow.selector, mintValue, mintValue - 1)); + mailboxFacet.requestL2Transaction{value: mintValue - 1}({ + _contractL2: tempAddress, + _l2Value: l2Value, + _calldata: tempBytes, + _l2GasLimit: 1000000, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _factoryDeps: tempBytesArr, + _refundRecipient: tempAddress + }); + } + + function test_RevertWhen_factoryDepsLengthExceeded() public 
{ + tempBytesArr = new bytes[](MAX_NEW_FACTORY_DEPS + 1); + + vm.expectRevert(TooManyFactoryDeps.selector); + mailboxFacet.requestL2Transaction({ + _contractL2: tempAddress, + _l2Value: 0, + _calldata: tempBytes, + _l2GasLimit: 0, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _factoryDeps: tempBytesArr, + _refundRecipient: tempAddress + }); + } + + function _requestL2Transaction( + uint256 amount, + uint256 baseCost, + uint256 l2GasLimit + ) internal returns (bytes32 canonicalTxHash, uint256 mintValue) { + bytes[] memory factoryDeps = new bytes[](1); + factoryDeps[0] = "11111111111111111111111111111111"; + + mintValue = baseCost + amount; + + vm.deal(sender, mintValue); + vm.prank(sender); + canonicalTxHash = mailboxFacet.requestL2Transaction{value: mintValue}({ + _contractL2: tempAddress, + _l2Value: amount, + _calldata: tempBytes, + _l2GasLimit: l2GasLimit, + _l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + _factoryDeps: factoryDeps, + _refundRecipient: tempAddress + }); + } + + function test_RevertWhen_bridgePaused(uint256 randomValue) public { + utilsFacet.util_setBaseTokenGasPriceMultiplierDenominator(1); + utilsFacet.util_setPriorityTxMaxGasLimit(100000000); + + uint256 l2GasLimit = 1000000; + uint256 baseCost = mailboxFacet.l2TransactionBaseCost(10000000, l2GasLimit, REQUIRED_L2_GAS_PRICE_PER_PUBDATA); + randomValue = bound(randomValue, 0, type(uint256).max - baseCost); + + l1SharedBridge.pause(); + + vm.expectRevert("Pausable: paused"); + _requestL2Transaction(randomValue, baseCost, l2GasLimit); + } + + function test_success_requestL2Transaction(uint256 randomValue) public { + utilsFacet.util_setBaseTokenGasPriceMultiplierDenominator(1); + utilsFacet.util_setPriorityTxMaxGasLimit(100000000); + + uint256 l2GasLimit = 1000000; + uint256 baseCost = mailboxFacet.l2TransactionBaseCost(10000000, l2GasLimit, REQUIRED_L2_GAS_PRICE_PER_PUBDATA); + randomValue = bound(randomValue, 0, type(uint256).max - baseCost); + + bytes32 
canonicalTxHash; + uint256 mintValue; + + (canonicalTxHash, mintValue) = _requestL2Transaction(randomValue, baseCost, l2GasLimit); + assertTrue(canonicalTxHash != bytes32(0), "canonicalTxHash should not be 0"); + assertEq(baseTokenBridgeAddress.balance, mintValue); + assertEq(l1SharedBridge.chainBalance(eraChainId, ETH_TOKEN_ADDRESS), mintValue); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Mailbox/_Mailbox_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/_Mailbox_Shared.t.sol similarity index 53% rename from l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Mailbox/_Mailbox_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/_Mailbox_Shared.t.sol index 03ab74a8d..37755b08e 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Mailbox/_Mailbox_Shared.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/chain-deps/facets/Mailbox/_Mailbox_Shared.t.sol @@ -3,37 +3,37 @@ pragma solidity 0.8.24; import {Test} from "forge-std/Test.sol"; -import {Utils} from "foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; - +import {Utils} from "foundry-test/l1/unit/concrete/Utils/Utils.sol"; +import {UtilsFacet} from "foundry-test/l1/unit/concrete/Utils/UtilsFacet.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; import {MailboxFacet} from "contracts/state-transition/chain-deps/facets/Mailbox.sol"; +import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; +import {IGetters} from "contracts/state-transition/chain-interfaces/IGetters.sol"; import 
{TestnetVerifier} from "contracts/state-transition/TestnetVerifier.sol"; contract MailboxTest is Test { IMailbox internal mailboxFacet; UtilsFacet internal utilsFacet; + IGetters internal gettersFacet; address sender; - uint256 eraChainId = 9; + uint256 constant eraChainId = 9; address internal testnetVerifier = address(new TestnetVerifier()); + address diamondProxy; + address bridgehub; - function getMailboxSelectors() public pure returns (bytes4[] memory) { - bytes4[] memory selectors = new bytes4[](1); - selectors[0] = IMailbox.bridgehubRequestL2Transaction.selector; - return selectors; - } - - function setUp() public virtual { + function setupDiamondProxy() public virtual { sender = makeAddr("sender"); + bridgehub = makeAddr("bridgehub"); vm.deal(sender, 100 ether); - Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](2); + Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](3); facetCuts[0] = Diamond.FacetCut({ - facet: address(new MailboxFacet(eraChainId)), + facet: address(new MailboxFacet(eraChainId, block.chainid)), action: Diamond.Action.Add, isFreezable: true, - selectors: getMailboxSelectors() + selectors: Utils.getMailboxSelectors() }); facetCuts[1] = Diamond.FacetCut({ facet: address(new UtilsFacet()), @@ -41,10 +41,19 @@ contract MailboxTest is Test { isFreezable: true, selectors: Utils.getUtilsFacetSelectors() }); + facetCuts[2] = Diamond.FacetCut({ + facet: address(new GettersFacet()), + action: Diamond.Action.Add, + isFreezable: true, + selectors: Utils.getGettersSelectors() + }); - address diamondProxy = Utils.makeDiamondProxy(facetCuts, testnetVerifier); + diamondProxy = Utils.makeDiamondProxy(facetCuts, testnetVerifier); mailboxFacet = IMailbox(diamondProxy); utilsFacet = UtilsFacet(diamondProxy); + gettersFacet = IGetters(diamondProxy); + + utilsFacet.util_setBridgehub(bridgehub); } // add this to be excluded from coverage report diff --git 
a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/data-availability/CalldataDA.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/data-availability/CalldataDA.t.sol new file mode 100644 index 000000000..24657a499 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/data-availability/CalldataDA.t.sol @@ -0,0 +1,181 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {Utils} from "../../Utils/Utils.sol"; +import {TestCalldataDA} from "contracts/dev-contracts/test/TestCalldataDA.sol"; +import {BLOB_SIZE_BYTES} from "contracts/state-transition/data-availability/CalldataDA.sol"; + +contract CalldataDATest is Test { + TestCalldataDA calldataDA; + + function setUp() public { + calldataDA = new TestCalldataDA(); + } + + /*////////////////////////////////////////////////////////////////////////// + CalldataDA::_processL2RollupDAValidatorOutputHash + //////////////////////////////////////////////////////////////////////////*/ + + function test_RevertWhen_OperatorInputTooSmall() public { + bytes32 l2DAValidatorOutputHash = Utils.randomBytes32("l2DAValidatorOutputHash"); + uint256 maxBlobsSupported = 1; + bytes memory operatorDAInput = hex""; + + vm.expectRevert("too small"); + calldataDA.processL2RollupDAValidatorOutputHash(l2DAValidatorOutputHash, maxBlobsSupported, operatorDAInput); + } + + function test_RevertWhen_InvalidNumberOfBlobs() public { + bytes32 l2DAValidatorOutputHash = Utils.randomBytes32("l2DAValidatorOutputHash"); + uint256 maxBlobsSupported = 1; + + bytes32 stateDiffHash = Utils.randomBytes32("stateDiffHash"); + bytes32 fullPubdataHash = Utils.randomBytes32("fullPubdataHash"); + uint8 blobsProvided = 8; + + bytes memory operatorDAInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided); + + vm.expectRevert("invalid number of blobs"); + calldataDA.processL2RollupDAValidatorOutputHash(l2DAValidatorOutputHash, maxBlobsSupported, 
operatorDAInput); + } + + function test_RevertWhen_InvalidBlobHashes() public { + bytes32 l2DAValidatorOutputHash = Utils.randomBytes32("l2DAValidatorOutputHash"); + uint256 maxBlobsSupported = 1; + + bytes32 stateDiffHash = Utils.randomBytes32("stateDiffHash"); + bytes32 fullPubdataHash = Utils.randomBytes32("fullPubdataHash"); + uint8 blobsProvided = 1; + + bytes memory operatorDAInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided); + + vm.expectRevert("invalid blobs hashes"); + calldataDA.processL2RollupDAValidatorOutputHash(l2DAValidatorOutputHash, maxBlobsSupported, operatorDAInput); + } + + function test_RevertWhen_InvaliL2DAOutputHash() public { + bytes32 l2DAValidatorOutputHash = Utils.randomBytes32("l2DAValidatorOutputHash"); + uint256 maxBlobsSupported = 1; + + bytes32 stateDiffHash = Utils.randomBytes32("stateDiffHash"); + bytes32 fullPubdataHash = Utils.randomBytes32("fullPubdataHash"); + uint8 blobsProvided = 1; + bytes32 blobLinearHash = Utils.randomBytes32("blobLinearHash"); + + bytes memory operatorDAInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided, blobLinearHash); + + vm.expectRevert("invalid l2 DA output hash"); + calldataDA.processL2RollupDAValidatorOutputHash(l2DAValidatorOutputHash, maxBlobsSupported, operatorDAInput); + } + + function test_ProcessL2RollupDAValidatorOutputHash() public { + bytes32 stateDiffHash = Utils.randomBytes32("stateDiffHash"); + bytes32 fullPubdataHash = Utils.randomBytes32("fullPubdataHash"); + uint8 blobsProvided = 1; + bytes32 blobLinearHash = Utils.randomBytes32("blobLinearHash"); + + bytes memory daInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided, blobLinearHash); + bytes memory l1DaInput = "verifydonttrust"; + + bytes32 l2DAValidatorOutputHash = keccak256(daInput); + + bytes memory operatorDAInput = abi.encodePacked(daInput, l1DaInput); + + ( + bytes32 outputStateDiffHash, + bytes32 outputFullPubdataHash, + bytes32[] memory blobsLinearHashes, + uint256 
outputBlobsProvided, + bytes memory outputL1DaInput + ) = calldataDA.processL2RollupDAValidatorOutputHash(l2DAValidatorOutputHash, blobsProvided, operatorDAInput); + + assertEq(outputStateDiffHash, stateDiffHash, "stateDiffHash"); + assertEq(outputFullPubdataHash, fullPubdataHash, "fullPubdataHash"); + assertEq(blobsLinearHashes.length, 1, "blobsLinearHashesLength"); + assertEq(blobsLinearHashes[0], blobLinearHash, "blobsLinearHashes"); + assertEq(outputL1DaInput, l1DaInput, "l1DaInput"); + } + + /*////////////////////////////////////////////////////////////////////////// + CalldataDA::_processCalldataDA + //////////////////////////////////////////////////////////////////////////*/ + + function test_RevertWhen_OnlyOneBlobWithCalldata(uint256 blobsProvided) public { + vm.assume(blobsProvided != 1); + bytes32 fullPubdataHash = Utils.randomBytes32("fullPubdataHash"); + uint256 maxBlobsSupported = 6; + bytes memory pubdataInput = ""; + + vm.expectRevert("only one blob with calldata"); + calldataDA.processCalldataDA(blobsProvided, fullPubdataHash, maxBlobsSupported, pubdataInput); + } + + function test_RevertWhen_PubdataTooBig() public { + uint256 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes calldata pubdataInput = makeBytesArrayOfLength(BLOB_SIZE_BYTES + 33); + bytes32 fullPubdataHash = keccak256(pubdataInput); + + vm.expectRevert(bytes("cz")); + calldataDA.processCalldataDA(blobsProvided, fullPubdataHash, maxBlobsSupported, pubdataInput); + } + + function test_RevertWhen_PubdataTooSmall() public { + uint256 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes calldata pubdataInput = makeBytesArrayOfLength(31); + bytes32 fullPubdataHash = keccak256(pubdataInput); + + vm.expectRevert(bytes("pubdata too small")); + calldataDA.processCalldataDA(blobsProvided, fullPubdataHash, maxBlobsSupported, pubdataInput); + } + + function test_RevertWhen_PubdataDoesntMatchPubdataHash() public { + uint256 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes 
memory pubdataInputWithoutBlobCommitment = "verifydonttrustzkistheendgamemagicmoonmath"; + bytes32 blobCommitment = Utils.randomBytes32("blobCommitment"); + bytes memory pubdataInput = abi.encodePacked(pubdataInputWithoutBlobCommitment, blobCommitment); + bytes32 fullPubdataHash = keccak256(pubdataInput); + + vm.expectRevert(bytes("wp")); + calldataDA.processCalldataDA(blobsProvided, fullPubdataHash, maxBlobsSupported, pubdataInput); + } + + function test_ProcessCalldataDA() public { + uint256 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes memory pubdataInputWithoutBlobCommitment = "verifydonttrustzkistheendgamemagicmoonmath"; + bytes32 blobCommitment = Utils.randomBytes32("blobCommitment"); + bytes memory pubdataInput = abi.encodePacked(pubdataInputWithoutBlobCommitment, blobCommitment); + bytes32 fullPubdataHash = keccak256(pubdataInputWithoutBlobCommitment); + + (bytes32[] memory blobCommitments, bytes memory pubdata) = calldataDA.processCalldataDA( + blobsProvided, + fullPubdataHash, + maxBlobsSupported, + pubdataInput + ); + + assertEq(blobCommitments.length, 6, "blobCommitmentsLength"); + assertEq(blobCommitments[0], blobCommitment, "blobCommitment1"); + assertEq(blobCommitments[1], bytes32(0), "blobCommitment2"); + assertEq(blobCommitments[2], bytes32(0), "blobCommitment3"); + assertEq(blobCommitments[3], bytes32(0), "blobCommitment4"); + assertEq(blobCommitments[4], bytes32(0), "blobCommitment5"); + assertEq(blobCommitments[5], bytes32(0), "blobCommitment6"); + assertEq(pubdata, pubdataInputWithoutBlobCommitment, "pubdata"); + } + + /*////////////////////////////////////////////////////////////////////////// + Util Functions + //////////////////////////////////////////////////////////////////////////*/ + + function makeBytesArrayOfLength(uint256 len) internal returns (bytes calldata arr) { + assembly { + arr.length := len + } + } +} diff --git 
a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/data-availability/RelayedSLDAValidator.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/data-availability/RelayedSLDAValidator.t.sol new file mode 100644 index 000000000..9d896cf08 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/data-availability/RelayedSLDAValidator.t.sol @@ -0,0 +1,132 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Test, console} from "forge-std/Test.sol"; +import {Utils} from "../../Utils/Utils.sol"; +import {RelayedSLDAValidator} from "contracts/state-transition/data-availability/RelayedSLDAValidator.sol"; +import {L1DAValidatorOutput, PubdataSource} from "contracts/state-transition/chain-interfaces/IL1DAValidator.sol"; +import {L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IL1Messenger} from "contracts/common/interfaces/IL1Messenger.sol"; +import {L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +contract RelayedSLDAValidatorTest is Test { + uint256 constant CHAIN_ID = 193; + address constant CHAIN_ADDRESS = address(0x1234); + RelayedSLDAValidator daValidator; + + function setUp() public { + daValidator = new RelayedSLDAValidator(); + vm.etch(L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, abi.encode(address(daValidator))); + vm.mockCall( + L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, + abi.encodeWithSelector(IL1Messenger.sendToL1.selector), + abi.encode(bytes32(0)) + ); + vm.mockCall( + L2_BRIDGEHUB_ADDR, + abi.encodeWithSelector(IBridgehub.getZKChain.selector, (CHAIN_ID)), + abi.encode(CHAIN_ADDRESS) + ); + } + + /*////////////////////////////////////////////////////////////////////////// + RelayedSLDAValidator::checkDA + //////////////////////////////////////////////////////////////////////////*/ + + function test_RevertWhen_InvalidPubdataSource() public { + bytes32 
stateDiffHash = Utils.randomBytes32("stateDiffHash"); + bytes32 fullPubdataHash = Utils.randomBytes32("fullPubdataHash"); + uint8 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes32 blobLinearHash = Utils.randomBytes32("blobLinearHash"); + + bytes memory daInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided, blobLinearHash); + bytes memory l1DaInput = "verifydonttrust"; + + bytes32 l2DAValidatorOutputHash = keccak256(daInput); + + bytes memory operatorDAInput = abi.encodePacked(daInput, l1DaInput); + + vm.prank(CHAIN_ADDRESS); + vm.expectRevert("l1-da-validator/invalid-pubdata-source"); + daValidator.checkDA(CHAIN_ID, 0, l2DAValidatorOutputHash, operatorDAInput, maxBlobsSupported); + } + + function test_revertWhen_PubdataInputTooSmall() public { + bytes memory pubdata = "verifydont"; + console.logBytes(pubdata); + + bytes32 stateDiffHash = Utils.randomBytes32("stateDiffHash"); + uint8 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes32 blobLinearHash = Utils.randomBytes32("blobLinearHash"); + uint8 pubdataSource = uint8(PubdataSource.Calldata); + bytes memory l1DaInput = "verifydonttrust"; + bytes32 fullPubdataHash = keccak256(pubdata); + + bytes memory daInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided, blobLinearHash); + + bytes32 l2DAValidatorOutputHash = keccak256(daInput); + + bytes memory operatorDAInput = abi.encodePacked(daInput, pubdataSource, l1DaInput); + + vm.prank(CHAIN_ADDRESS); + vm.expectRevert("pubdata too small"); + daValidator.checkDA(CHAIN_ID, 0, l2DAValidatorOutputHash, operatorDAInput, maxBlobsSupported); + } + + function test_revertWhenInvalidSender() public { + bytes memory pubdata = "verifydont"; + console.logBytes(pubdata); + + bytes32 stateDiffHash = Utils.randomBytes32("stateDiffHash"); + uint8 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes32 blobLinearHash = Utils.randomBytes32("blobLinearHash"); + uint8 pubdataSource = uint8(PubdataSource.Calldata); + bytes 
memory l1DaInput = "verifydonttrust"; + bytes32 fullPubdataHash = keccak256(pubdata); + + bytes memory daInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided, blobLinearHash); + + bytes32 l2DAValidatorOutputHash = keccak256(daInput); + + bytes memory operatorDAInput = abi.encodePacked(daInput, pubdataSource, l1DaInput); + + vm.expectRevert("l1-da-validator/invalid-sender"); + daValidator.checkDA(CHAIN_ID, 0, l2DAValidatorOutputHash, operatorDAInput, maxBlobsSupported); + } + + function test_checkDA() public { + bytes memory pubdata = "verifydont"; + console.logBytes(pubdata); + + bytes32 stateDiffHash = Utils.randomBytes32("stateDiffHash"); + uint8 blobsProvided = 1; + uint256 maxBlobsSupported = 6; + bytes32 blobLinearHash = Utils.randomBytes32("blobLinearHash"); + uint8 pubdataSource = uint8(PubdataSource.Calldata); + bytes memory l1DaInput = "verifydonttrustzkistheendgamemagicmoonmath"; + bytes32 fullPubdataHash = keccak256(pubdata); + + bytes memory daInput = abi.encodePacked(stateDiffHash, fullPubdataHash, blobsProvided, blobLinearHash); + + bytes32 l2DAValidatorOutputHash = keccak256(daInput); + + bytes memory operatorDAInput = abi.encodePacked(daInput, pubdataSource, l1DaInput); + + vm.prank(CHAIN_ADDRESS); + L1DAValidatorOutput memory output = daValidator.checkDA( + CHAIN_ID, + 0, + l2DAValidatorOutputHash, + operatorDAInput, + maxBlobsSupported + ); + assertEq(output.stateDiffHash, stateDiffHash, "stateDiffHash"); + assertEq(output.blobsLinearHashes.length, maxBlobsSupported, "blobsLinearHashesLength"); + assertEq(output.blobsOpeningCommitments.length, maxBlobsSupported, "blobsOpeningCommitmentsLength"); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/OnEmptyQueue.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/OnEmptyQueue.sol similarity index 78% rename from 
l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/OnEmptyQueue.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/OnEmptyQueue.sol index 7881409fc..753d5e33c 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/OnEmptyQueue.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/OnEmptyQueue.sol @@ -3,6 +3,7 @@ pragma solidity 0.8.24; import {PriorityQueueSharedTest} from "./_PriorityQueue_Shared.t.sol"; +import {QueueIsEmpty} from "contracts/common/L1ContractErrors.sol"; contract OnEmptyQueueTest is PriorityQueueSharedTest { function test_gets() public { @@ -13,12 +14,12 @@ contract OnEmptyQueueTest is PriorityQueueSharedTest { } function test_failGetFront() public { - vm.expectRevert(bytes("D")); + vm.expectRevert(QueueIsEmpty.selector); priorityQueue.front(); } function test_failPopFront() public { - vm.expectRevert(bytes("s")); + vm.expectRevert(QueueIsEmpty.selector); priorityQueue.popFront(); } } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/PopOperations.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/PopOperations.sol similarity index 96% rename from l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/PopOperations.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/PopOperations.sol index f2f7d73ba..5e43f6284 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/PopOperations.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/PopOperations.sol @@ -4,6 +4,7 @@ pragma solidity 0.8.24; import {PriorityQueueSharedTest} from "./_PriorityQueue_Shared.t.sol"; import {PriorityOperation} from "contracts/dev-contracts/test/PriorityQueueTest.sol"; +import {QueueIsEmpty} 
from "contracts/common/L1ContractErrors.sol"; contract PopOperationsTest is PriorityQueueSharedTest { uint256 public constant NUMBER_OPERATIONS = 10; @@ -67,7 +68,7 @@ contract PopOperationsTest is PriorityQueueSharedTest { assertTrue(priorityQueue.isEmpty()); // And now let's go over the limit and fail. - vm.expectRevert(bytes.concat("s")); + vm.expectRevert(QueueIsEmpty.selector); priorityQueue.popFront(); } } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/PushOperations.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/PushOperations.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/PushOperations.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/PushOperations.sol diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/_PriorityQueue_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/_PriorityQueue_Shared.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/libraries/PriorityQueue/_PriorityQueue_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityQueue/_PriorityQueue_Shared.t.sol diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityTree/PriorityTree.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityTree/PriorityTree.t.sol new file mode 100644 index 000000000..447e4e47d --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityTree/PriorityTree.t.sol @@ -0,0 +1,104 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {PriorityTreeSharedTest, PriorityOpsBatchInfo} from "./_PriorityTree_Shared.t.sol"; +import {PriorityTreeCommitment} from 
"contracts/common/Config.sol"; + +bytes32 constant ZERO_LEAF_HASH = keccak256(""); + +contract PriorityTreeTest is PriorityTreeSharedTest { + function test_gets() public { + assertEq(0, priorityTree.getSize()); + assertEq(0, priorityTree.getFirstUnprocessedPriorityTx()); + assertEq(0, priorityTree.getTotalPriorityTxs()); + assertEq(bytes32(0), priorityTree.getRoot()); + } + + function test_push() public { + bytes32 leaf1 = keccak256(abi.encode(1)); + bytes32 leaf2 = keccak256(abi.encode(2)); + + priorityTree.push(leaf1); + + assertEq(1, priorityTree.getSize()); + assertEq(0, priorityTree.getFirstUnprocessedPriorityTx()); + assertEq(1, priorityTree.getTotalPriorityTxs()); + assertEq(leaf1, priorityTree.getRoot()); + + priorityTree.push(leaf2); + + assertEq(2, priorityTree.getSize()); + assertEq(0, priorityTree.getFirstUnprocessedPriorityTx()); + assertEq(2, priorityTree.getTotalPriorityTxs()); + + bytes32 expectedRoot = keccak256(abi.encode(leaf1, leaf2)); + assertEq(expectedRoot, priorityTree.getRoot()); + } + + function test_processEmptyBatch() public { + pushMockEntries(3); + + assertEq(0, priorityTree.getFirstUnprocessedPriorityTx()); + priorityTree.processBatch( + PriorityOpsBatchInfo({ + leftPath: new bytes32[](0), + rightPath: new bytes32[](0), + itemHashes: new bytes32[](0) + }) + ); + + assertEq(0, priorityTree.getFirstUnprocessedPriorityTx()); + } + + function test_processBatch() public { + bytes32[] memory leaves = pushMockEntries(3); + assertEq(0, priorityTree.getFirstUnprocessedPriorityTx()); + + // 2 batches with: 1 tx, 2 txs. 
+ + bytes32[] memory leftPath = new bytes32[](2); + bytes32[] memory rightPath = new bytes32[](2); + rightPath[0] = leaves[1]; + rightPath[1] = keccak256(abi.encode(leaves[2], ZERO_LEAF_HASH)); + bytes32[] memory batch1 = new bytes32[](1); + batch1[0] = leaves[0]; + + priorityTree.processBatch(PriorityOpsBatchInfo({leftPath: leftPath, rightPath: rightPath, itemHashes: batch1})); + + assertEq(1, priorityTree.getFirstUnprocessedPriorityTx()); + + leftPath[0] = leaves[0]; + rightPath[0] = ZERO_LEAF_HASH; + rightPath[1] = bytes32(0); + bytes32[] memory batch2 = new bytes32[](2); + batch2[0] = leaves[1]; + batch2[1] = leaves[2]; + + priorityTree.processBatch(PriorityOpsBatchInfo({leftPath: leftPath, rightPath: rightPath, itemHashes: batch2})); + + assertEq(3, priorityTree.getFirstUnprocessedPriorityTx()); + } + + function test_processBatch_shouldRevert() public { + bytes32[] memory itemHashes = pushMockEntries(3); + + vm.expectRevert("PT: root mismatch"); + priorityTree.processBatch( + PriorityOpsBatchInfo({leftPath: new bytes32[](2), rightPath: new bytes32[](2), itemHashes: itemHashes}) + ); + } + + function test_commitDecommit() public { + pushMockEntries(3); + bytes32 root = priorityTree.getRoot(); + + PriorityTreeCommitment memory commitment = priorityTree.getCommitment(); + priorityTree.initFromCommitment(commitment); + + assertEq(0, priorityTree.getFirstUnprocessedPriorityTx()); + assertEq(3, priorityTree.getTotalPriorityTxs()); + assertEq(root, priorityTree.getRoot()); + assertEq(ZERO_LEAF_HASH, priorityTree.getZero()); + } +} diff --git a/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityTree/_PriorityTree_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityTree/_PriorityTree_Shared.t.sol new file mode 100644 index 000000000..0a39e74d1 --- /dev/null +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/PriorityTree/_PriorityTree_Shared.t.sol @@ -0,0 +1,28 @@ +// 
SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {PriorityTreeTest, PriorityOpsBatchInfo} from "contracts/dev-contracts/test/PriorityTreeTest.sol"; + +contract PriorityTreeSharedTest is Test { + PriorityTreeTest internal priorityTree; + + constructor() { + priorityTree = new PriorityTreeTest(); + } + + // Pushes 'count' entries into the priority tree. + function pushMockEntries(uint256 count) public returns (bytes32[] memory) { + bytes32[] memory hashes = new bytes32[](count); + for (uint256 i = 0; i < count; ++i) { + bytes32 hash = keccak256(abi.encode(i)); + hashes[i] = hash; + priorityTree.push(hash); + } + return hashes; + } + + // add this to be excluded from coverage report + function test() internal virtual {} +} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/ValidateL1L2Tx.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/ValidateL1L2Tx.t.sol similarity index 88% rename from l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/ValidateL1L2Tx.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/ValidateL1L2Tx.t.sol index bb78a71b5..8016b62f4 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/ValidateL1L2Tx.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/ValidateL1L2Tx.t.sol @@ -4,6 +4,7 @@ pragma solidity 0.8.24; import {TransactionValidatorSharedTest} from "./_TransactionValidator_Shared.t.sol"; import {L2CanonicalTransaction} from "contracts/common/Messaging.sol"; +import {PubdataGreaterThanLimit, TxnBodyGasLimitNotEnoughGas, ValidateTxnNotEnoughGas, NotEnoughGas, TooMuchGas, InvalidPubdataLength} from "contracts/common/L1ContractErrors.sol"; contract ValidateL1L2TxTest is TransactionValidatorSharedTest { 
function test_BasicRequestL1L2() public pure { @@ -12,11 +13,11 @@ contract ValidateL1L2TxTest is TransactionValidatorSharedTest { validateL1ToL2Transaction(testTx, 500000, 100000); } - function test_RevertWhen_GasLimitDoesntCoverOverhead() public { + function test_RevertWhen_GasLimitdoesntCoverOverhead() public { L2CanonicalTransaction memory testTx = createTestTransaction(); // The limit is so low, that it doesn't even cover the overhead testTx.gasLimit = 0; - vm.expectRevert(bytes("my")); + vm.expectRevert(TxnBodyGasLimitNotEnoughGas.selector); validateL1ToL2Transaction(testTx, 500000, 100000); } @@ -27,7 +28,7 @@ contract ValidateL1L2TxTest is TransactionValidatorSharedTest { // before checking that it is below the max gas limit. uint256 priorityTxMaxGasLimit = 500000; testTx.gasLimit = priorityTxMaxGasLimit + 1000000; - vm.expectRevert(bytes("ui")); + vm.expectRevert(TooMuchGas.selector); validateL1ToL2Transaction(testTx, priorityTxMaxGasLimit, 100000); } @@ -41,7 +42,7 @@ contract ValidateL1L2TxTest is TransactionValidatorSharedTest { // So if the pubdata costs per byte is 1 - then this transaction could produce 500k of pubdata. // (hypothetically, assuming all the gas was spent on writing). 
testTx.gasPerPubdataByteLimit = 1; - vm.expectRevert(bytes("uk")); + vm.expectRevert(abi.encodeWithSelector(PubdataGreaterThanLimit.selector, 100000, 490000)); validateL1ToL2Transaction(testTx, priorityTxMaxGasLimit, 100000); } @@ -49,7 +50,7 @@ contract ValidateL1L2TxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createTestTransaction(); uint256 priorityTxMaxGasLimit = 500000; testTx.gasLimit = 200000; - vm.expectRevert(bytes("up")); + vm.expectRevert(ValidateTxnNotEnoughGas.selector); validateL1ToL2Transaction(testTx, priorityTxMaxGasLimit, 100000); } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/ValidateUpgradeTransaction.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/ValidateUpgradeTransaction.t.sol similarity index 75% rename from l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/ValidateUpgradeTransaction.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/ValidateUpgradeTransaction.t.sol index df9a8f7eb..f3ac8238c 100644 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/ValidateUpgradeTransaction.t.sol +++ b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/ValidateUpgradeTransaction.t.sol @@ -5,6 +5,7 @@ import {TransactionValidatorSharedTest} from "./_TransactionValidator_Shared.t.s import {L2CanonicalTransaction} from "contracts/common/Messaging.sol"; import {TransactionValidator} from "contracts/state-transition/libraries/TransactionValidator.sol"; +import {InvalidUpgradeTxn, UpgradeTxVerifyParam} from "contracts/common/L1ContractErrors.sol"; contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { function test_BasicRequest() public pure { @@ -16,7 +17,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { 
L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // only system contracts (address < 2^16) are allowed to send upgrade transactions. testTx.from = uint256(1000000000); - vm.expectRevert(bytes("ua")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.From)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -24,7 +25,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // Now the 'to' address it too large. testTx.to = uint256(type(uint160).max) + 100; - vm.expectRevert(bytes("ub")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.To)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -32,7 +33,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // Paymaster must be 0 - otherwise we revert. testTx.paymaster = 1; - vm.expectRevert(bytes("uc")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.Paymaster)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -40,7 +41,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // Value must be 0 - otherwise we revert. testTx.value = 1; - vm.expectRevert(bytes("ud")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.Value)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -48,7 +49,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // reserved 0 must be 0 - otherwise we revert. 
testTx.reserved[0] = 1; - vm.expectRevert(bytes("ue")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.Reserved0)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -56,7 +57,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // reserved 1 must be a valid address testTx.reserved[1] = uint256(type(uint160).max) + 100; - vm.expectRevert(bytes("uf")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.Reserved1)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -64,7 +65,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // reserved 2 must be 0 - otherwise we revert. testTx.reserved[2] = 1; - vm.expectRevert(bytes("ug")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.Reserved2)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -72,7 +73,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // reserved 3 be 0 - otherwise we revert. testTx.reserved[3] = 1; - vm.expectRevert(bytes("uo")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.Reserved3)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -80,7 +81,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // Signature must be 0 - otherwise we revert. 
testTx.signature = bytes("hello"); - vm.expectRevert(bytes("uh")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.Signature)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -88,7 +89,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // PaymasterInput must be 0 - otherwise we revert. testTx.paymasterInput = bytes("hi"); - vm.expectRevert(bytes("ul1")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.PaymasterInput)); TransactionValidator.validateUpgradeTransaction(testTx); } @@ -96,7 +97,7 @@ contract ValidateUpgradeTxTest is TransactionValidatorSharedTest { L2CanonicalTransaction memory testTx = createUpgradeTransaction(); // ReservedDynamic must be 0 - otherwise we revert. testTx.reservedDynamic = bytes("something"); - vm.expectRevert(bytes("um")); + vm.expectRevert(abi.encodeWithSelector(InvalidUpgradeTxn.selector, UpgradeTxVerifyParam.ReservedDynamic)); TransactionValidator.validateUpgradeTransaction(testTx); } } diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/_TransactionValidator_Shared.t.sol b/l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/_TransactionValidator_Shared.t.sol similarity index 100% rename from l1-contracts/test/foundry/unit/concrete/state-transition/libraries/TransactionValidator/_TransactionValidator_Shared.t.sol rename to l1-contracts/test/foundry/l1/unit/concrete/state-transition/libraries/TransactionValidator/_TransactionValidator_Shared.t.sol diff --git a/l1-contracts/test/foundry/l2/integration/L2ERC20BridgeTest.t.sol b/l1-contracts/test/foundry/l2/integration/L2ERC20BridgeTest.t.sol new file mode 100644 index 000000000..8fe0ffcc8 --- /dev/null +++ b/l1-contracts/test/foundry/l2/integration/L2ERC20BridgeTest.t.sol @@ -0,0 +1,49 @@ +// 
SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR} from "contracts/common/L2ContractAddresses.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; +import {SharedL2ContractL1DeployerUtils} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractL1DeployerUtils.sol"; +import {L2Utils, SystemContractsArgs} from "./L2Utils.sol"; +import {SharedL2ContractL2DeployerUtils} from "./_SharedL2ContractL2DeployerUtils.sol"; +import {L2Erc20TestAbstract} from "../../l1/integration/l2-tests-in-l1-context/L2Erc20TestAbstract.t.sol"; +import {SharedL2ContractDeployer} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractDeployer.sol"; + +contract L2Erc20Test is Test, L2Erc20TestAbstract, SharedL2ContractL2DeployerUtils { + function test() internal virtual override(DeployUtils, SharedL2ContractL2DeployerUtils) {} + + function initSystemContracts( + SystemContractsArgs memory _args + ) internal override(SharedL2ContractDeployer, SharedL2ContractL2DeployerUtils) { + super.initSystemContracts(_args); + } + + function deployViaCreate2( + bytes memory creationCode, + bytes memory constructorArgs + ) internal override(DeployUtils, SharedL2ContractL2DeployerUtils) returns (address) { + return super.deployViaCreate2(creationCode, constructorArgs); + } + + function 
deployL2Contracts( + uint256 _l1ChainId + ) public override(SharedL2ContractL1DeployerUtils, SharedL2ContractDeployer) { + super.deployL2Contracts(_l1ChainId); + } +} diff --git a/l1-contracts/test/foundry/l2/integration/L2GatewayTests.t.sol b/l1-contracts/test/foundry/l2/integration/L2GatewayTests.t.sol new file mode 100644 index 000000000..b1ae7bb5f --- /dev/null +++ b/l1-contracts/test/foundry/l2/integration/L2GatewayTests.t.sol @@ -0,0 +1,60 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; +import "forge-std/console.sol"; + +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {IL2NativeTokenVault} from "contracts/bridge/ntv/IL2NativeTokenVault.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; + +import {L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {ETH_TOKEN_ADDRESS, SETTLEMENT_LAYER_RELAY_SENDER} from "contracts/common/Config.sol"; + +import {AddressAliasHelper} from "contracts/vendor/AddressAliasHelper.sol"; +import {BridgehubMintCTMAssetData} from "contracts/bridgehub/IBridgehub.sol"; +import {IAdmin} from "contracts/state-transition/chain-interfaces/IAdmin.sol"; +import {IL2AssetRouter} from "contracts/bridge/asset-router/IL2AssetRouter.sol"; +import {IL1Nullifier} from "contracts/bridge/interfaces/IL1Nullifier.sol"; +import {IL1AssetRouter} from "contracts/bridge/asset-router/IL1AssetRouter.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + +import {L2Utils, SystemContractsArgs} from "./L2Utils.sol"; + +import {SharedL2ContractL2DeployerUtils} from "./_SharedL2ContractL2DeployerUtils.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {IZKChain} from "contracts/state-transition/chain-interfaces/IZKChain.sol"; + +import 
{DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; +import {SharedL2ContractL1DeployerUtils} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractL1DeployerUtils.sol"; +import {L2GatewayTestAbstract} from "../../l1/integration/l2-tests-in-l1-context/L2GatewayTestAbstract.t.sol"; +import {SharedL2ContractDeployer} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractDeployer.sol"; + +contract L2GatewayTests is Test, L2GatewayTestAbstract, SharedL2ContractL2DeployerUtils { + // We need to emulate a L1->L2 transaction from the L1 bridge to L2 counterpart. + // It is a bit easier to use EOA and it is sufficient for the tests. + function test() internal virtual override(DeployUtils, SharedL2ContractL2DeployerUtils) {} + + function initSystemContracts( + SystemContractsArgs memory _args + ) internal override(SharedL2ContractDeployer, SharedL2ContractL2DeployerUtils) { + super.initSystemContracts(_args); + } + + function deployViaCreate2( + bytes memory creationCode, + bytes memory constructorArgs + ) internal override(DeployUtils, SharedL2ContractL2DeployerUtils) returns (address) { + return super.deployViaCreate2(creationCode, constructorArgs); + } + + function deployL2Contracts( + uint256 _l1ChainId + ) public override(SharedL2ContractL1DeployerUtils, SharedL2ContractDeployer) { + super.deployL2Contracts(_l1ChainId); + } +} diff --git a/l1-contracts/test/foundry/l2/integration/L2Utils.sol b/l1-contracts/test/foundry/l2/integration/L2Utils.sol new file mode 100644 index 000000000..d10105734 --- /dev/null +++ b/l1-contracts/test/foundry/l2/integration/L2Utils.sol @@ -0,0 +1,238 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +import {Vm} from "forge-std/Vm.sol"; +import "forge-std/console.sol"; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; +import {BeaconProxy} from "@openzeppelin/contracts-v4/proxy/beacon/BeaconProxy.sol"; +import {DEPLOYER_SYSTEM_CONTRACT, 
L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR, L2_BRIDGEHUB_ADDR, L2_MESSAGE_ROOT_ADDR} from "contracts/common/L2ContractAddresses.sol"; +import {IContractDeployer, L2ContractHelper} from "contracts/common/libraries/L2ContractHelper.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; + +import {L2AssetRouter} from "contracts/bridge/asset-router/L2AssetRouter.sol"; +import {L2NativeTokenVault} from "contracts/bridge/ntv/L2NativeTokenVault.sol"; +import {L2SharedBridgeLegacy} from "contracts/bridge/L2SharedBridgeLegacy.sol"; +import {IMessageRoot} from "contracts/bridgehub/IMessageRoot.sol"; +import {ICTMDeploymentTracker} from "contracts/bridgehub/ICTMDeploymentTracker.sol"; +import {Bridgehub, IBridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {MessageRoot} from "contracts/bridgehub/MessageRoot.sol"; + +import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; + +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {BridgedStandardERC20} from "contracts/bridge/BridgedStandardERC20.sol"; + +import {SystemContractsCaller} from "contracts/common/libraries/SystemContractsCaller.sol"; +import {DeployFailed} from "contracts/common/L1ContractErrors.sol"; +import {SystemContractsArgs} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractDeployer.sol"; + +library L2Utils { + address internal constant VM_ADDRESS = address(uint160(uint256(keccak256("hevm cheat code")))); + Vm internal constant vm = Vm(VM_ADDRESS); + + address internal constant L2_FORCE_DEPLOYER_ADDR = address(0x8007); + + string internal constant L2_ASSET_ROUTER_PATH = "./zkout/L2AssetRouter.sol/L2AssetRouter.json"; + string internal constant L2_NATIVE_TOKEN_VAULT_PATH = "./zkout/L2NativeTokenVault.sol/L2NativeTokenVault.json"; + string internal constant BRIDGEHUB_PATH = "./zkout/Bridgehub.sol/Bridgehub.json"; + + /// @notice Returns the bytecode of a given era contract 
from a `zkout` folder. + function readEraBytecode(string memory _filename) internal returns (bytes memory bytecode) { + string memory artifact = vm.readFile( + // solhint-disable-next-line func-named-parameters + string.concat("./zkout/", _filename, ".sol/", _filename, ".json") + ); + + bytecode = vm.parseJsonBytes(artifact, ".bytecode.object"); + } + + /// @notice Returns the bytecode of a given system contract. + function readSystemContractsBytecode(string memory _filename) internal view returns (bytes memory) { + string memory file = vm.readFile( + // solhint-disable-next-line func-named-parameters + string.concat( + "../system-contracts/artifacts-zk/contracts-preprocessed/", + _filename, + ".sol/", + _filename, + ".json" + ) + ); + bytes memory bytecode = vm.parseJson(file, "$.bytecode"); + return bytecode; + } + + /** + * @dev Initializes the system contracts. + * @dev It is a hack needed to make the tests be able to call system contracts directly. + */ + function initSystemContracts(SystemContractsArgs memory _args) internal { + bytes memory contractDeployerBytecode = readSystemContractsBytecode("ContractDeployer"); + vm.etch(DEPLOYER_SYSTEM_CONTRACT, contractDeployerBytecode); + forceDeploySystemContracts(_args); + } + + function forceDeploySystemContracts(SystemContractsArgs memory _args) internal { + forceDeployMessageRoot(); + forceDeployBridgehub( + _args.l1ChainId, + _args.eraChainId, + _args.aliasedOwner, + _args.l1AssetRouter, + _args.legacySharedBridge, + _args.l1CtmDeployer + ); + forceDeployAssetRouter( + _args.l1ChainId, + _args.eraChainId, + _args.aliasedOwner, + _args.l1AssetRouter, + _args.legacySharedBridge + ); + forceDeployNativeTokenVault( + _args.l1ChainId, + _args.aliasedOwner, + _args.l2TokenProxyBytecodeHash, + _args.legacySharedBridge, + _args.l2TokenBeacon, + _args.contractsDeployedAlready + ); + } + + function forceDeployMessageRoot() internal { + new MessageRoot(IBridgehub(L2_BRIDGEHUB_ADDR)); + 
forceDeployWithConstructor("MessageRoot", L2_MESSAGE_ROOT_ADDR, abi.encode(L2_BRIDGEHUB_ADDR)); + } + + function forceDeployBridgehub( + uint256 _l1ChainId, + uint256 _eraChainId, + address _aliasedOwner, + address _l1AssetRouter, + address _legacySharedBridge, + address _l1CtmDeployer + ) internal { + new Bridgehub(_l1ChainId, _aliasedOwner, 100); + forceDeployWithConstructor("Bridgehub", L2_BRIDGEHUB_ADDR, abi.encode(_l1ChainId, _aliasedOwner, 100)); + Bridgehub bridgehub = Bridgehub(L2_BRIDGEHUB_ADDR); + vm.prank(_aliasedOwner); + bridgehub.setAddresses( + L2_ASSET_ROUTER_ADDR, + ICTMDeploymentTracker(_l1CtmDeployer), + IMessageRoot(L2_MESSAGE_ROOT_ADDR) + ); + } + + /// @notice Deploys the L2AssetRouter contract. + /// @param _l1ChainId The chain ID of the L1 chain. + /// @param _eraChainId The chain ID of the era chain. + /// @param _l1AssetRouter The address of the L1 asset router. + /// @param _legacySharedBridge The address of the legacy shared bridge. + function forceDeployAssetRouter( + uint256 _l1ChainId, + uint256 _eraChainId, + address _aliasedOwner, + address _l1AssetRouter, + address _legacySharedBridge + ) internal { + // to ensure that the bytecode is known + bytes32 ethAssetId = DataEncoding.encodeNTVAssetId(_l1ChainId, ETH_TOKEN_ADDRESS); + { + new L2AssetRouter(_l1ChainId, _eraChainId, _l1AssetRouter, _legacySharedBridge, ethAssetId, _aliasedOwner); + } + forceDeployWithConstructor( + "L2AssetRouter", + L2_ASSET_ROUTER_ADDR, + abi.encode(_l1ChainId, _eraChainId, _l1AssetRouter, _legacySharedBridge, ethAssetId, _aliasedOwner) + ); + } + + /// @notice Deploys the L2NativeTokenVault contract. + /// @param _l1ChainId The chain ID of the L1 chain. + /// @param _aliasedOwner The address of the aliased owner. + /// @param _l2TokenProxyBytecodeHash The hash of the L2 token proxy bytecode. + /// @param _legacySharedBridge The address of the legacy shared bridge. + /// @param _l2TokenBeacon The address of the L2 token beacon. 
+ /// @param _contractsDeployedAlready Whether the contracts are deployed already. + function forceDeployNativeTokenVault( + uint256 _l1ChainId, + address _aliasedOwner, + bytes32 _l2TokenProxyBytecodeHash, + address _legacySharedBridge, + address _l2TokenBeacon, + bool _contractsDeployedAlready + ) internal { + // to ensure that the bytecode is known + bytes32 ethAssetId = DataEncoding.encodeNTVAssetId(_l1ChainId, ETH_TOKEN_ADDRESS); + { + new L2NativeTokenVault({ + _l1ChainId: _l1ChainId, + _aliasedOwner: _aliasedOwner, + _l2TokenProxyBytecodeHash: _l2TokenProxyBytecodeHash, + _legacySharedBridge: _legacySharedBridge, + _bridgedTokenBeacon: _l2TokenBeacon, + _contractsDeployedAlready: _contractsDeployedAlready, + _wethToken: address(0), + _baseTokenAssetId: ethAssetId + }); + } + forceDeployWithConstructor( + "L2NativeTokenVault", + L2_NATIVE_TOKEN_VAULT_ADDR, + abi.encode( + _l1ChainId, + _aliasedOwner, + _l2TokenProxyBytecodeHash, + _legacySharedBridge, + _l2TokenBeacon, + _contractsDeployedAlready, + address(0), + ethAssetId + ) + ); + } + + function forceDeployWithConstructor( + string memory _contractName, + address _address, + bytes memory _constructorArgs + ) public { + bytes memory bytecode = readEraBytecode(_contractName); + + bytes32 bytecodehash = L2ContractHelper.hashL2Bytecode(bytecode); + + IContractDeployer.ForceDeployment[] memory deployments = new IContractDeployer.ForceDeployment[](1); + deployments[0] = IContractDeployer.ForceDeployment({ + bytecodeHash: bytecodehash, + newAddress: _address, + callConstructor: true, + value: 0, + input: _constructorArgs + }); + + vm.prank(L2_FORCE_DEPLOYER_ADDR); + IContractDeployer(DEPLOYER_SYSTEM_CONTRACT).forceDeployOnAddresses(deployments); + } + + function deployViaCreat2L2( + bytes memory creationCode, + bytes memory constructorargs, + bytes32 create2salt + ) internal returns (address) { + bytes memory bytecode = abi.encodePacked(creationCode, constructorargs); + address contractAddress; + assembly { + 
contractAddress := create2(0, add(bytecode, 0x20), mload(bytecode), create2salt) + } + uint32 size; + assembly { + size := extcodesize(contractAddress) + } + if (size == 0) { + revert DeployFailed(); + } + return contractAddress; + } +} diff --git a/l1-contracts/test/foundry/l2/integration/WETH.t.sol b/l1-contracts/test/foundry/l2/integration/WETH.t.sol new file mode 100644 index 000000000..f7932b4eb --- /dev/null +++ b/l1-contracts/test/foundry/l2/integration/WETH.t.sol @@ -0,0 +1,36 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +import {Test} from "forge-std/Test.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; + +import {SharedL2ContractDeployer} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractDeployer.sol"; +import {SharedL2ContractL1DeployerUtils} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractL1DeployerUtils.sol"; +import {L2WethTestAbstract} from "../../l1/integration/l2-tests-in-l1-context/L2WethTestAbstract.t.sol"; + +import {SharedL2ContractL2DeployerUtils, SystemContractsArgs} from "./_SharedL2ContractL2DeployerUtils.sol"; + +contract WethTest is Test, L2WethTestAbstract, SharedL2ContractL2DeployerUtils { + function test() internal virtual override(DeployUtils, SharedL2ContractL2DeployerUtils) {} + + function initSystemContracts( + SystemContractsArgs memory _args + ) internal override(SharedL2ContractDeployer, SharedL2ContractL2DeployerUtils) { + super.initSystemContracts(_args); + } + + function deployViaCreate2( + bytes memory creationCode, + bytes memory constructorArgs + ) internal override(DeployUtils, SharedL2ContractL2DeployerUtils) returns (address) { + return super.deployViaCreate2(creationCode, constructorArgs); + } + + function deployL2Contracts( + uint256 _l1ChainId + ) public override(SharedL2ContractL1DeployerUtils, SharedL2ContractDeployer) { + super.deployL2Contracts(_l1ChainId); + } +} diff --git 
a/l1-contracts/test/foundry/l2/integration/_SharedL2ContractL2DeployerUtils.sol b/l1-contracts/test/foundry/l2/integration/_SharedL2ContractL2DeployerUtils.sol new file mode 100644 index 000000000..0b42255b5 --- /dev/null +++ b/l1-contracts/test/foundry/l2/integration/_SharedL2ContractL2DeployerUtils.sol @@ -0,0 +1,41 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; +import {StdStorage, stdStorage, stdToml} from "forge-std/Test.sol"; +import {Script, console2 as console} from "forge-std/Script.sol"; + +import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; +import {L1AssetRouter} from "contracts/bridge/asset-router/L1AssetRouter.sol"; +import {L1Nullifier} from "contracts/bridge/L1Nullifier.sol"; +import {L1NativeTokenVault} from "contracts/bridge/ntv/L1NativeTokenVault.sol"; +import {DataEncoding} from "contracts/common/libraries/DataEncoding.sol"; +import {CTMDeploymentTracker} from "contracts/bridgehub/CTMDeploymentTracker.sol"; +import {IChainTypeManager} from "contracts/state-transition/IChainTypeManager.sol"; +import {DeployedAddresses, Config} from "deploy-scripts/DeployUtils.s.sol"; + +import {DeployUtils} from "deploy-scripts/DeployUtils.s.sol"; + +import {L2_BRIDGEHUB_ADDR, L2_ASSET_ROUTER_ADDR, L2_NATIVE_TOKEN_VAULT_ADDR} from "contracts/common/L2ContractAddresses.sol"; + +import {L2Utils} from "./L2Utils.sol"; +import {SharedL2ContractL1DeployerUtils, SystemContractsArgs} from "../../l1/integration/l2-tests-in-l1-context/_SharedL2ContractL1DeployerUtils.sol"; + +contract SharedL2ContractL2DeployerUtils is DeployUtils, SharedL2ContractL1DeployerUtils { + using stdToml for string; + + function initSystemContracts(SystemContractsArgs memory _args) internal virtual override { + L2Utils.initSystemContracts(_args); + } + + function deployViaCreate2( + bytes memory creationCode, + bytes memory constructorArgs + ) internal virtual override returns (address) { + console.log("Deploying via create2 
L2"); + return L2Utils.deployViaCreat2L2(creationCode, constructorArgs, config.contracts.create2FactorySalt); + } + + // add this to be excluded from coverage report + function test() internal virtual override(DeployUtils, SharedL2ContractL1DeployerUtils) {} +} diff --git a/l1-contracts/test/foundry/l2/unit/L2AdminFactory/L2AdminFactory.t.sol b/l1-contracts/test/foundry/l2/unit/L2AdminFactory/L2AdminFactory.t.sol new file mode 100644 index 000000000..7b85a8c54 --- /dev/null +++ b/l1-contracts/test/foundry/l2/unit/L2AdminFactory/L2AdminFactory.t.sol @@ -0,0 +1,42 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +import {Test} from "forge-std/Test.sol"; + +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {L2AdminFactory} from "contracts/governance/L2AdminFactory.sol"; +import {PermanentRestriction} from "contracts/governance/PermanentRestriction.sol"; +import {IPermanentRestriction} from "contracts/governance/IPermanentRestriction.sol"; + +contract L2AdminFactoryTest is Test { + function testL2AdminFactory() public { + address[] memory requiredRestrictions = new address[](1); + requiredRestrictions[0] = makeAddr("required"); + + L2AdminFactory factory = new L2AdminFactory(requiredRestrictions); + + address[] memory additionalRestrictions = new address[](1); + additionalRestrictions[0] = makeAddr("additional"); + + address[] memory allRestrictions = new address[](2); + allRestrictions[0] = requiredRestrictions[0]; + allRestrictions[1] = additionalRestrictions[0]; + + bytes32 salt = keccak256("salt"); + + address admin = factory.deployAdmin(additionalRestrictions, salt); + + // Now, we need to check whether it would be able to accept such an admin + PermanentRestriction restriction = new PermanentRestriction(IBridgehub(address(0)), address(factory)); + + bytes32 codeHash; + assembly { + codeHash := extcodehash(admin) + } + + vm.expectEmit(true, false, false, true); + emit IPermanentRestriction.AllowL2Admin(admin); + 
restriction.allowL2Admin(salt, codeHash, keccak256(abi.encode(allRestrictions))); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/Bridgehub/experimental_bridge.t.sol b/l1-contracts/test/foundry/unit/concrete/Bridgehub/experimental_bridge.t.sol deleted file mode 100644 index ff747ddac..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Bridgehub/experimental_bridge.t.sol +++ /dev/null @@ -1,1108 +0,0 @@ -//SPDX-License-Identifier: UNLICENSED - -pragma solidity 0.8.24; - -import {stdStorage, StdStorage, Test} from "forge-std/Test.sol"; - -import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; -import {Bridgehub} from "contracts/bridgehub/Bridgehub.sol"; -import {ChainCreationParams} from "contracts/state-transition/IStateTransitionManager.sol"; -import {L2TransactionRequestDirect, L2TransactionRequestTwoBridgesOuter} from "contracts/bridgehub/IBridgehub.sol"; -import {DummyStateTransitionManagerWBH} from "contracts/dev-contracts/test/DummyStateTransitionManagerWithBridgeHubAddress.sol"; -import {DummyHyperchain} from "contracts/dev-contracts/test/DummyHyperchain.sol"; -import {DummySharedBridge} from "contracts/dev-contracts/test/DummySharedBridge.sol"; -import {IL1SharedBridge} from "contracts/bridge/interfaces/IL1SharedBridge.sol"; - -import {L2Message, L2Log, TxStatus, BridgehubL2TransactionRequest} from "contracts/common/Messaging.sol"; -import {ETH_TOKEN_ADDRESS, REQUIRED_L2_GAS_PRICE_PER_PUBDATA, MAX_NEW_FACTORY_DEPS} from "contracts/common/Config.sol"; - -contract ExperimentalBridgeTest is Test { - using stdStorage for StdStorage; - - Bridgehub bridgeHub; - address public bridgeOwner; - DummyStateTransitionManagerWBH mockSTM; - DummyHyperchain mockChainContract; - DummySharedBridge mockSharedBridge; - DummySharedBridge mockSecondSharedBridge; - TestnetERC20Token testToken; - - uint256 eraChainId; - - function setUp() public { - eraChainId = 9; 
- bridgeHub = new Bridgehub(); - bridgeOwner = makeAddr("BRIDGE_OWNER"); - mockSTM = new DummyStateTransitionManagerWBH(address(bridgeHub)); - mockChainContract = new DummyHyperchain(address(bridgeHub), eraChainId); - mockSharedBridge = new DummySharedBridge(keccak256("0xabc")); - mockSecondSharedBridge = new DummySharedBridge(keccak256("0xdef")); - testToken = new TestnetERC20Token("ZKSTT", "ZkSync Test Token", 18); - - // test if the ownership of the bridgeHub is set correctly or not - address defaultOwner = bridgeHub.owner(); - - // Now, the `reentrancyGuardInitializer` should prevent anyone from calling `initialize` since we have called the constructor of the contract - vm.expectRevert(bytes("1B")); - bridgeHub.initialize(bridgeOwner); - - vm.store( - address(mockChainContract), - 0x8e94fed44239eb2314ab7a406345e6c5a8f0ccedf3b600de3d004e672c33abf4, - bytes32(uint256(1)) - ); - bytes32 bridgehubLocation = bytes32(uint256(36)); - vm.store(address(mockChainContract), bridgehubLocation, bytes32(uint256(uint160(address(bridgeHub))))); - bytes32 baseTokenGasPriceNominatorLocation = bytes32(uint256(40)); - vm.store(address(mockChainContract), baseTokenGasPriceNominatorLocation, bytes32(uint256(1))); - bytes32 baseTokenGasPriceDenominatorLocation = bytes32(uint256(41)); - vm.store(address(mockChainContract), baseTokenGasPriceDenominatorLocation, bytes32(uint256(1))); - // The ownership can only be transferred by the current owner to a new owner via the two-step approach - - // Default owner calls transferOwnership - vm.prank(defaultOwner); - bridgeHub.transferOwnership(bridgeOwner); - - // bridgeOwner calls acceptOwnership - vm.prank(bridgeOwner); - bridgeHub.acceptOwnership(); - - // Ownership should have changed - assertEq(bridgeHub.owner(), bridgeOwner); - } - - function test_onlyOwnerCanSetDeployer(address randomDeployer) public { - assertEq(address(0), bridgeHub.admin()); - vm.prank(bridgeHub.owner()); - bridgeHub.setPendingAdmin(randomDeployer); - 
vm.prank(randomDeployer); - bridgeHub.acceptAdmin(); - - assertEq(randomDeployer, bridgeHub.admin()); - } - - function test_randomCallerCannotSetDeployer(address randomCaller, address randomDeployer) public { - if (randomCaller != bridgeHub.owner() && randomCaller != bridgeHub.admin()) { - vm.prank(randomCaller); - vm.expectRevert(bytes("Bridgehub: not owner or admin")); - bridgeHub.setPendingAdmin(randomDeployer); - - // The deployer shouldn't have changed. - assertEq(address(0), bridgeHub.admin()); - } - } - - function test_addStateTransitionManager(address randomAddressWithoutTheCorrectInterface) public { - bool isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(!isSTMRegistered); - - vm.prank(bridgeOwner); - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(isSTMRegistered); - - // An address that has already been registered, cannot be registered again (at least not before calling `removeStateTransitionManager`). 
- vm.prank(bridgeOwner); - vm.expectRevert(bytes("Bridgehub: state transition already registered")); - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(isSTMRegistered); - } - - function test_addStateTransitionManager_cannotBeCalledByRandomAddress( - address randomCaller, - address randomAddressWithoutTheCorrectInterface - ) public { - bool isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(!isSTMRegistered); - - if (randomCaller != bridgeOwner) { - vm.prank(randomCaller); - vm.expectRevert(bytes("Ownable: caller is not the owner")); - - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - } - - vm.prank(bridgeOwner); - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(isSTMRegistered); - - // An address that has already been registered, cannot be registered again (at least not before calling `removeStateTransitionManager`). 
- vm.prank(bridgeOwner); - vm.expectRevert(bytes("Bridgehub: state transition already registered")); - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - // Definitely not by a random caller - if (randomCaller != bridgeOwner) { - vm.prank(randomCaller); - vm.expectRevert("Ownable: caller is not the owner"); - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - } - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(isSTMRegistered); - } - - function test_removeStateTransitionManager(address randomAddressWithoutTheCorrectInterface) public { - bool isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(!isSTMRegistered); - - // A non-existent STM cannot be removed - vm.prank(bridgeOwner); - vm.expectRevert(bytes("Bridgehub: state transition not registered yet")); - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - // Let's first register our particular stateTransitionManager - vm.prank(bridgeOwner); - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(isSTMRegistered); - - // Only an address that has already been registered, can be removed. 
- vm.prank(bridgeOwner); - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(!isSTMRegistered); - - // An already removed STM cannot be removed again - vm.prank(bridgeOwner); - vm.expectRevert(bytes("Bridgehub: state transition not registered yet")); - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - } - - function test_removeStateTransitionManager_cannotBeCalledByRandomAddress( - address randomAddressWithoutTheCorrectInterface, - address randomCaller - ) public { - bool isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(!isSTMRegistered); - - if (randomCaller != bridgeOwner) { - vm.prank(randomCaller); - vm.expectRevert(bytes("Ownable: caller is not the owner")); - - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - } - - // A non-existent STM cannot be removed - vm.prank(bridgeOwner); - vm.expectRevert(bytes("Bridgehub: state transition not registered yet")); - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - // Let's first register our particular stateTransitionManager - vm.prank(bridgeOwner); - bridgeHub.addStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(isSTMRegistered); - - // Only an address that has already been registered, can be removed. 
- vm.prank(bridgeOwner); - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - isSTMRegistered = bridgeHub.stateTransitionManagerIsRegistered(randomAddressWithoutTheCorrectInterface); - assertTrue(!isSTMRegistered); - - // An already removed STM cannot be removed again - vm.prank(bridgeOwner); - vm.expectRevert(bytes("Bridgehub: state transition not registered yet")); - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - - // Not possible by a randomcaller as well - if (randomCaller != bridgeOwner) { - vm.prank(randomCaller); - vm.expectRevert(bytes("Ownable: caller is not the owner")); - bridgeHub.removeStateTransitionManager(randomAddressWithoutTheCorrectInterface); - } - } - - function test_addToken(address, address randomAddress) public { - assertTrue(!bridgeHub.tokenIsRegistered(randomAddress), "This random address is not registered as a token"); - - vm.prank(bridgeOwner); - bridgeHub.addToken(randomAddress); - - assertTrue( - bridgeHub.tokenIsRegistered(randomAddress), - "after call from the bridgeowner, this randomAddress should be a registered token" - ); - - if (randomAddress != address(testToken)) { - // Testing to see if an actual ERC20 implementation can also be added or not - vm.prank(bridgeOwner); - bridgeHub.addToken(address(testToken)); - - assertTrue(bridgeHub.tokenIsRegistered(address(testToken))); - } - - // An already registered token cannot be registered again - vm.prank(bridgeOwner); - vm.expectRevert("Bridgehub: token already registered"); - bridgeHub.addToken(randomAddress); - } - - function test_addToken_cannotBeCalledByRandomAddress(address randomAddress, address randomCaller) public { - vm.assume(randomAddress != bridgeOwner); - vm.assume(randomAddress != bridgeHub.admin()); - - vm.prank(randomCaller); - vm.expectRevert(bytes("Bridgehub: not owner or admin")); - bridgeHub.addToken(randomAddress); - - assertTrue(!bridgeHub.tokenIsRegistered(randomAddress), "This random address 
is not registered as a token"); - - vm.prank(bridgeOwner); - bridgeHub.addToken(randomAddress); - - assertTrue( - bridgeHub.tokenIsRegistered(randomAddress), - "after call from the bridgeowner, this randomAddress should be a registered token" - ); - - if (randomAddress != address(testToken)) { - // Testing to see if an actual ERC20 implementation can also be added or not - vm.prank(bridgeOwner); - bridgeHub.addToken(address(testToken)); - - assertTrue(bridgeHub.tokenIsRegistered(address(testToken))); - } - - // An already registered token cannot be registered again by randomCaller - if (randomCaller != bridgeOwner) { - vm.prank(bridgeOwner); - vm.expectRevert("Bridgehub: token already registered"); - bridgeHub.addToken(randomAddress); - } - } - - function test_setSharedBridge(address randomAddress) public { - assertTrue( - bridgeHub.sharedBridge() == IL1SharedBridge(address(0)), - "This random address is not registered as sharedBridge" - ); - - vm.prank(bridgeOwner); - bridgeHub.setSharedBridge(randomAddress); - - assertTrue( - bridgeHub.sharedBridge() == IL1SharedBridge(randomAddress), - "after call from the bridgeowner, this randomAddress should be the registered sharedBridge" - ); - } - - function test_setSharedBridge_cannotBeCalledByRandomAddress(address randomCaller, address randomAddress) public { - if (randomCaller != bridgeOwner) { - vm.prank(randomCaller); - vm.expectRevert(bytes("Ownable: caller is not the owner")); - bridgeHub.setSharedBridge(randomAddress); - } - - assertTrue( - bridgeHub.sharedBridge() == IL1SharedBridge(address(0)), - "This random address is not registered as sharedBridge" - ); - - vm.prank(bridgeOwner); - bridgeHub.setSharedBridge(randomAddress); - - assertTrue( - bridgeHub.sharedBridge() == IL1SharedBridge(randomAddress), - "after call from the bridgeowner, this randomAddress should be the registered sharedBridge" - ); - } - - uint256 newChainId; - address admin; - - function test_createNewChain( - address randomCaller, - uint256 
chainId, - bool isFreezable, - bytes4[] memory mockSelectors, - address mockInitAddress, - bytes memory mockInitCalldata - ) public { - address deployerAddress = makeAddr("DEPLOYER_ADDRESS"); - admin = makeAddr("NEW_CHAIN_ADMIN"); - // Diamond.DiamondCutData memory dcData; - - vm.prank(bridgeOwner); - bridgeHub.setPendingAdmin(deployerAddress); - vm.prank(deployerAddress); - bridgeHub.acceptAdmin(); - vm.startPrank(bridgeOwner); - bridgeHub.addStateTransitionManager(address(mockSTM)); - bridgeHub.addToken(address(testToken)); - bridgeHub.setSharedBridge(address(mockSharedBridge)); - vm.stopPrank(); - - if (randomCaller != deployerAddress && randomCaller != bridgeOwner) { - vm.prank(randomCaller); - vm.expectRevert(bytes("Bridgehub: not owner or admin")); - bridgeHub.createNewChain({ - _chainId: chainId, - _stateTransitionManager: address(mockSTM), - _baseToken: address(testToken), - _salt: uint256(123), - _admin: admin, - _initData: bytes("") - }); - } - - chainId = bound(chainId, 1, type(uint48).max); - vm.prank(mockSTM.owner()); - bytes memory _newChainInitData = _createNewChainInitData( - isFreezable, - mockSelectors, - mockInitAddress, - mockInitCalldata - ); - - // bridgeHub.createNewChain => stateTransitionManager.createNewChain => this function sets the stateTransition mapping - // of `chainId`, let's emulate that using foundry cheatcodes or let's just use the extra function we introduced in our mockSTM - mockSTM.setHyperchain(chainId, address(mockChainContract)); - assertTrue(mockSTM.getHyperchain(chainId) == address(mockChainContract)); - - vm.startPrank(deployerAddress); - vm.mockCall( - address(mockSTM), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - mockSTM.createNewChain.selector, - chainId, - address(testToken), - address(mockSharedBridge), - admin, - _newChainInitData - ), - bytes("") - ); - - newChainId = bridgeHub.createNewChain({ - _chainId: chainId, - _stateTransitionManager: address(mockSTM), - _baseToken: 
address(testToken), - _salt: uint256(chainId * 2), - _admin: admin, - _initData: _newChainInitData - }); - - vm.stopPrank(); - vm.clearMockedCalls(); - - assertTrue(bridgeHub.stateTransitionManager(newChainId) == address(mockSTM)); - assertTrue(bridgeHub.baseToken(newChainId) == address(testToken)); - } - - function test_getHyperchain(uint256 mockChainId) public { - mockChainId = _setUpHyperchainForChainId(mockChainId); - - // Now the following statements should be true as well: - assertTrue(bridgeHub.stateTransitionManager(mockChainId) == address(mockSTM)); - address returnedHyperchain = bridgeHub.getHyperchain(mockChainId); - - assertEq(returnedHyperchain, address(mockChainContract)); - } - - function test_proveL2MessageInclusion( - uint256 mockChainId, - uint256 mockBatchNumber, - uint256 mockIndex, - bytes32[] memory mockProof, - uint16 randomTxNumInBatch, - address randomSender, - bytes memory randomData - ) public { - mockChainId = _setUpHyperchainForChainId(mockChainId); - - // Now the following statements should be true as well: - assertTrue(bridgeHub.stateTransitionManager(mockChainId) == address(mockSTM)); - assertTrue(bridgeHub.getHyperchain(mockChainId) == address(mockChainContract)); - - // Creating a random L2Message::l2Message so that we pass the correct parameters to `proveL2MessageInclusion` - L2Message memory l2Message = _createMockL2Message(randomTxNumInBatch, randomSender, randomData); - - // Since we have used random data for the `bridgeHub.proveL2MessageInclusion` function which basically forwards the call - // to the same function in the mailbox, we will mock the call to the mailbox to return true and see if it works. 
- vm.mockCall( - address(mockChainContract), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - mockChainContract.proveL2MessageInclusion.selector, - mockBatchNumber, - mockIndex, - l2Message, - mockProof - ), - abi.encode(true) - ); - - assertTrue( - bridgeHub.proveL2MessageInclusion({ - _chainId: mockChainId, - _batchNumber: mockBatchNumber, - _index: mockIndex, - _message: l2Message, - _proof: mockProof - }) - ); - vm.clearMockedCalls(); - } - - function test_proveL2LogInclusion( - uint256 mockChainId, - uint256 mockBatchNumber, - uint256 mockIndex, - bytes32[] memory mockProof, - uint8 randomL2ShardId, - bool randomIsService, - uint16 randomTxNumInBatch, - address randomSender, - bytes32 randomKey, - bytes32 randomValue - ) public { - mockChainId = _setUpHyperchainForChainId(mockChainId); - - // Now the following statements should be true as well: - assertTrue(bridgeHub.stateTransitionManager(mockChainId) == address(mockSTM)); - assertTrue(bridgeHub.getHyperchain(mockChainId) == address(mockChainContract)); - - // Creating a random L2Log::l2Log so that we pass the correct parameters to `proveL2LogInclusion` - L2Log memory l2Log = _createMockL2Log({ - randomL2ShardId: randomL2ShardId, - randomIsService: randomIsService, - randomTxNumInBatch: randomTxNumInBatch, - randomSender: randomSender, - randomKey: randomKey, - randomValue: randomValue - }); - - // Since we have used random data for the `bridgeHub.proveL2LogInclusion` function which basically forwards the call - // to the same function in the mailbox, we will mock the call to the mailbox to return true and see if it works. 
- vm.mockCall( - address(mockChainContract), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - mockChainContract.proveL2LogInclusion.selector, - mockBatchNumber, - mockIndex, - l2Log, - mockProof - ), - abi.encode(true) - ); - - assertTrue( - bridgeHub.proveL2LogInclusion({ - _chainId: mockChainId, - _batchNumber: mockBatchNumber, - _index: mockIndex, - _log: l2Log, - _proof: mockProof - }) - ); - vm.clearMockedCalls(); - } - - function test_proveL1ToL2TransactionStatus( - uint256 randomChainId, - bytes32 randomL2TxHash, - uint256 randomL2BatchNumber, - uint256 randomL2MessageIndex, - uint16 randomL2TxNumberInBatch, - bytes32[] memory randomMerkleProof, - bool randomResultantBool, - bool txStatusBool - ) public { - randomChainId = _setUpHyperchainForChainId(randomChainId); - - TxStatus txStatus; - - if (txStatusBool) { - txStatus = TxStatus.Failure; - } else { - txStatus = TxStatus.Success; - } - - vm.mockCall( - address(mockChainContract), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - mockChainContract.proveL1ToL2TransactionStatus.selector, - randomL2TxHash, - randomL2BatchNumber, - randomL2MessageIndex, - randomL2TxNumberInBatch, - randomMerkleProof, - txStatus - ), - abi.encode(randomResultantBool) - ); - - assertTrue( - bridgeHub.proveL1ToL2TransactionStatus({ - _chainId: randomChainId, - _l2TxHash: randomL2TxHash, - _l2BatchNumber: randomL2BatchNumber, - _l2MessageIndex: randomL2MessageIndex, - _l2TxNumberInBatch: randomL2TxNumberInBatch, - _merkleProof: randomMerkleProof, - _status: txStatus - }) == randomResultantBool - ); - } - - function test_l2TransactionBaseCost( - uint256 mockChainId, - uint256 mockGasPrice, - uint256 mockL2GasLimit, - uint256 mockL2GasPerPubdataByteLimit, - uint256 mockL2TxnCost - ) public { - mockChainId = _setUpHyperchainForChainId(mockChainId); - - vm.mockCall( - address(mockChainContract), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( 
- mockChainContract.l2TransactionBaseCost.selector, - mockGasPrice, - mockL2GasLimit, - mockL2GasPerPubdataByteLimit - ), - abi.encode(mockL2TxnCost) - ); - - assertTrue( - bridgeHub.l2TransactionBaseCost(mockChainId, mockGasPrice, mockL2GasLimit, mockL2GasPerPubdataByteLimit) == - mockL2TxnCost - ); - vm.clearMockedCalls(); - } - - function test_requestL2TransactionDirect_ETHCase( - uint256 mockChainId, - uint256 mockMintValue, - address mockL2Contract, - uint256 mockL2Value, - bytes memory mockL2Calldata, - uint256 mockL2GasLimit, - uint256 mockL2GasPerPubdataByteLimit, - bytes[] memory mockFactoryDeps, - address mockRefundRecipient, - bytes[] memory mockRefundRecipientBH - ) public { - if (mockFactoryDeps.length > MAX_NEW_FACTORY_DEPS) { - mockFactoryDeps = _restrictArraySize(mockFactoryDeps, MAX_NEW_FACTORY_DEPS); - } - - L2TransactionRequestDirect memory l2TxnReqDirect = _createMockL2TransactionRequestDirect({ - mockChainId: mockChainId, - mockMintValue: mockMintValue, - mockL2Contract: mockL2Contract, - mockL2Value: mockL2Value, - mockL2Calldata: mockL2Calldata, - mockL2GasLimit: mockL2GasLimit, - mockL2GasPerPubdataByteLimit: mockL2GasPerPubdataByteLimit, - mockFactoryDeps: mockFactoryDeps, - mockRefundRecipient: mockRefundRecipient - }); - - l2TxnReqDirect.chainId = _setUpHyperchainForChainId(l2TxnReqDirect.chainId); - - assertTrue(!(bridgeHub.baseToken(l2TxnReqDirect.chainId) == ETH_TOKEN_ADDRESS)); - _setUpBaseTokenForChainId(l2TxnReqDirect.chainId, true); - assertTrue(bridgeHub.baseToken(l2TxnReqDirect.chainId) == ETH_TOKEN_ADDRESS); - - _setUpSharedBridge(); - - address randomCaller = makeAddr("RANDOM_CALLER"); - vm.deal(randomCaller, l2TxnReqDirect.mintValue); - - assertTrue(bridgeHub.getHyperchain(l2TxnReqDirect.chainId) == address(mockChainContract)); - bytes32 canonicalHash = keccak256(abi.encode("CANONICAL_TX_HASH")); - //BridgehubL2TransactionRequest memory bhL2TxnRequest = - _createBhL2TxnRequest(mockRefundRecipientBH); - - vm.mockCall( - 
address(mockChainContract), - abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), - abi.encode(canonicalHash) - ); - - mockChainContract.setFeeParams(); - mockChainContract.setBaseTokenGasMultiplierPrice(uint128(1), uint128(1)); - mockChainContract.setBridgeHubAddress(address(bridgeHub)); - assertTrue(mockChainContract.getBridgeHubAddress() == address(bridgeHub)); - - vm.txGasPrice(0.05 ether); - - vm.prank(randomCaller); - bytes32 resultantHash = bridgeHub.requestL2TransactionDirect{value: randomCaller.balance}(l2TxnReqDirect); - - assertTrue(resultantHash == canonicalHash); - } - - function test_requestL2TransactionDirect_NonETHCase( - uint256 mockChainId, - uint256 mockMintValue, - address mockL2Contract, - uint256 mockL2Value, - bytes memory mockL2Calldata, - uint256 mockL2GasLimit, - uint256 mockL2GasPerPubdataByteLimit, - bytes[] memory mockFactoryDeps, - address mockRefundRecipient - ) public { - if (mockFactoryDeps.length > MAX_NEW_FACTORY_DEPS) { - mockFactoryDeps = _restrictArraySize(mockFactoryDeps, MAX_NEW_FACTORY_DEPS); - } - - L2TransactionRequestDirect memory l2TxnReqDirect = _createMockL2TransactionRequestDirect({ - mockChainId: mockChainId, - mockMintValue: mockMintValue, - mockL2Contract: mockL2Contract, - mockL2Value: mockL2Value, - mockL2Calldata: mockL2Calldata, - mockL2GasLimit: mockL2GasLimit, - mockL2GasPerPubdataByteLimit: mockL2GasPerPubdataByteLimit, - mockFactoryDeps: mockFactoryDeps, - mockRefundRecipient: mockRefundRecipient - }); - - l2TxnReqDirect.chainId = _setUpHyperchainForChainId(l2TxnReqDirect.chainId); - - _setUpBaseTokenForChainId(l2TxnReqDirect.chainId, false); - _setUpSharedBridge(); - - assertTrue(bridgeHub.getHyperchain(l2TxnReqDirect.chainId) == address(mockChainContract)); - bytes32 canonicalHash = keccak256(abi.encode("CANONICAL_TX_HASH")); - - vm.mockCall( - address(mockChainContract), - abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), - 
abi.encode(canonicalHash) - ); - - mockChainContract.setFeeParams(); - mockChainContract.setBaseTokenGasMultiplierPrice(uint128(1), uint128(1)); - mockChainContract.setBridgeHubAddress(address(bridgeHub)); - assertTrue(mockChainContract.getBridgeHubAddress() == address(bridgeHub)); - - vm.txGasPrice(0.05 ether); - - address randomCaller = makeAddr("RANDOM_CALLER"); - vm.deal(randomCaller, 1 ether); - - vm.prank(randomCaller); - vm.expectRevert("Bridgehub: non-eth bridge with msg.value"); - bytes32 resultantHash = bridgeHub.requestL2TransactionDirect{value: randomCaller.balance}(l2TxnReqDirect); - - // Now, let's call the same function with zero msg.value - testToken.mint(randomCaller, l2TxnReqDirect.mintValue); - assertEq(testToken.balanceOf(randomCaller), l2TxnReqDirect.mintValue); - - vm.prank(randomCaller); - testToken.transfer(address(this), l2TxnReqDirect.mintValue); - assertEq(testToken.balanceOf(address(this)), l2TxnReqDirect.mintValue); - testToken.approve(address(mockSharedBridge), l2TxnReqDirect.mintValue); - - resultantHash = bridgeHub.requestL2TransactionDirect(l2TxnReqDirect); - - assertEq(canonicalHash, resultantHash); - } - - function test_requestL2TransactionTwoBridges_ETHCase( - uint256 chainId, - uint256 mintValue, - uint256 l2Value, - uint256 l2GasLimit, - uint256 l2GasPerPubdataByteLimit, - address refundRecipient, - uint256 secondBridgeValue, - bytes memory secondBridgeCalldata - ) public { - L2TransactionRequestTwoBridgesOuter memory l2TxnReq2BridgeOut = _createMockL2TransactionRequestTwoBridgesOuter({ - chainId: chainId, - mintValue: mintValue, - l2Value: l2Value, - l2GasLimit: l2GasLimit, - l2GasPerPubdataByteLimit: l2GasPerPubdataByteLimit, - refundRecipient: refundRecipient, - secondBridgeValue: secondBridgeValue, - secondBridgeCalldata: secondBridgeCalldata - }); - - l2TxnReq2BridgeOut.chainId = _setUpHyperchainForChainId(l2TxnReq2BridgeOut.chainId); - - _setUpBaseTokenForChainId(l2TxnReq2BridgeOut.chainId, true); - 
assertTrue(bridgeHub.baseToken(l2TxnReq2BridgeOut.chainId) == ETH_TOKEN_ADDRESS); - - _setUpSharedBridge(); - assertTrue(bridgeHub.getHyperchain(l2TxnReq2BridgeOut.chainId) == address(mockChainContract)); - - uint256 callerMsgValue = l2TxnReq2BridgeOut.mintValue + l2TxnReq2BridgeOut.secondBridgeValue; - address randomCaller = makeAddr("RANDOM_CALLER"); - vm.deal(randomCaller, callerMsgValue); - - mockChainContract.setBridgeHubAddress(address(bridgeHub)); - - bytes32 canonicalHash = keccak256(abi.encode("CANONICAL_TX_HASH")); - - vm.mockCall( - address(mockChainContract), - abi.encodeWithSelector(mockChainContract.bridgehubRequestL2Transaction.selector), - abi.encode(canonicalHash) - ); - - vm.prank(randomCaller); - //bytes32 resultantHash = - bridgeHub.requestL2TransactionTwoBridges{value: randomCaller.balance}(l2TxnReq2BridgeOut); - - assertTrue(true); - } - - ///////////////////////////////////////////////////////// - // INTERNAL UTILITY FUNCTIONS - ///////////////////////////////////////////////////////// - - function _createMockL2TransactionRequestTwoBridgesOuter( - uint256 chainId, - uint256 mintValue, - uint256 l2Value, - uint256 l2GasLimit, - uint256 l2GasPerPubdataByteLimit, - address refundRecipient, - uint256 secondBridgeValue, - bytes memory secondBridgeCalldata - ) internal view returns (L2TransactionRequestTwoBridgesOuter memory) { - L2TransactionRequestTwoBridgesOuter memory l2Req; - - // Don't let the mintValue + secondBridgeValue go beyond type(uint256).max since that calculation is required to be done by our test: test_requestL2TransactionTwoBridges_ETHCase - mintValue = bound(mintValue, 1, (type(uint256).max) / 2); - secondBridgeValue = bound(secondBridgeValue, 1, (type(uint256).max) / 2); - - l2Req.chainId = chainId; - l2Req.mintValue = mintValue; - l2Req.l2Value = l2Value; - l2Req.l2GasLimit = l2GasLimit; - l2Req.l2GasPerPubdataByteLimit = l2GasPerPubdataByteLimit; - l2Req.refundRecipient = refundRecipient; - l2Req.secondBridgeAddress = 
address(mockSecondSharedBridge); - l2Req.secondBridgeValue = secondBridgeValue; - l2Req.secondBridgeCalldata = secondBridgeCalldata; - - return l2Req; - } - - function _createMockL2Message( - uint16 randomTxNumInBatch, - address randomSender, - bytes memory randomData - ) internal pure returns (L2Message memory) { - L2Message memory l2Message; - - l2Message.txNumberInBatch = randomTxNumInBatch; - l2Message.sender = randomSender; - l2Message.data = randomData; - - return l2Message; - } - - function _createMockL2Log( - uint8 randomL2ShardId, - bool randomIsService, - uint16 randomTxNumInBatch, - address randomSender, - bytes32 randomKey, - bytes32 randomValue - ) internal pure returns (L2Log memory) { - L2Log memory l2Log; - - l2Log.l2ShardId = randomL2ShardId; - l2Log.isService = randomIsService; - l2Log.txNumberInBatch = randomTxNumInBatch; - l2Log.sender = randomSender; - l2Log.key = randomKey; - l2Log.value = randomValue; - - return l2Log; - } - - function _createNewChainInitData( - bool isFreezable, - bytes4[] memory mockSelectors, - address, //mockInitAddress, - bytes memory //mockInitCalldata - ) internal returns (bytes memory) { - bytes4[] memory singleSelector = new bytes4[](1); - singleSelector[0] = bytes4(0xabcdef12); - - Diamond.FacetCut memory facetCut; - Diamond.DiamondCutData memory diamondCutData; - - facetCut.facet = address(this); // for a random address, it will fail the check of _facet.code.length > 0 - facetCut.action = Diamond.Action.Add; - facetCut.isFreezable = isFreezable; - if (mockSelectors.length == 0) { - mockSelectors = singleSelector; - } - facetCut.selectors = mockSelectors; - - Diamond.FacetCut[] memory facetCuts = new Diamond.FacetCut[](1); - facetCuts[0] = facetCut; - - diamondCutData.facetCuts = facetCuts; - diamondCutData.initAddress = address(0); - diamondCutData.initCalldata = ""; - - ChainCreationParams memory params = ChainCreationParams({ - diamondCut: diamondCutData, - // Just some dummy values: - genesisUpgrade: 
address(0x01), - genesisBatchHash: bytes32(uint256(0x01)), - genesisIndexRepeatedStorageChanges: uint64(0x01), - genesisBatchCommitment: bytes32(uint256(0x01)) - }); - - mockSTM.setChainCreationParams(params); - - return abi.encode(diamondCutData); - } - - function _setUpHyperchainForChainId(uint256 mockChainId) internal returns (uint256 mockChainIdInRange) { - mockChainId = bound(mockChainId, 2, type(uint48).max); - mockChainIdInRange = mockChainId; - vm.prank(bridgeOwner); - bridgeHub.addStateTransitionManager(address(mockSTM)); - - // We need to set the stateTransitionManager of the mockChainId to mockSTM - // There is no function to do that in the bridgeHub - // So, perhaps we will have to manually set the values in the stateTransitionManager mapping via a foundry cheatcode - assertTrue(!(bridgeHub.stateTransitionManager(mockChainId) == address(mockSTM))); - - stdstore.target(address(bridgeHub)).sig("stateTransitionManager(uint256)").with_key(mockChainId).checked_write( - address(mockSTM) - ); - - // Now in the StateTransitionManager that has been set for our mockChainId, we set the hyperchain contract as our mockChainContract - mockSTM.setHyperchain(mockChainId, address(mockChainContract)); - } - - function _setUpBaseTokenForChainId(uint256 mockChainId, bool tokenIsETH) internal { - address baseToken = tokenIsETH ? 
ETH_TOKEN_ADDRESS : address(testToken); - - stdstore.target(address(bridgeHub)).sig("baseToken(uint256)").with_key(mockChainId).checked_write(baseToken); - } - - function _setUpSharedBridge() internal { - vm.prank(bridgeOwner); - bridgeHub.setSharedBridge(address(mockSharedBridge)); - } - - function _createMockL2TransactionRequestDirect( - uint256 mockChainId, - uint256 mockMintValue, - address mockL2Contract, - uint256 mockL2Value, - bytes memory mockL2Calldata, - uint256 mockL2GasLimit, - // solhint-disable-next-line no-unused-vars - uint256 mockL2GasPerPubdataByteLimit, - bytes[] memory mockFactoryDeps, - address mockRefundRecipient - ) internal pure returns (L2TransactionRequestDirect memory) { - L2TransactionRequestDirect memory l2TxnReqDirect; - - l2TxnReqDirect.chainId = mockChainId; - l2TxnReqDirect.mintValue = mockMintValue; - l2TxnReqDirect.l2Contract = mockL2Contract; - l2TxnReqDirect.l2Value = mockL2Value; - l2TxnReqDirect.l2Calldata = mockL2Calldata; - l2TxnReqDirect.l2GasLimit = mockL2GasLimit; - l2TxnReqDirect.l2GasPerPubdataByteLimit = REQUIRED_L2_GAS_PRICE_PER_PUBDATA; - l2TxnReqDirect.factoryDeps = mockFactoryDeps; - l2TxnReqDirect.refundRecipient = mockRefundRecipient; - - return l2TxnReqDirect; - } - - function _createBhL2TxnRequest( - bytes[] memory mockFactoryDepsBH - ) internal returns (BridgehubL2TransactionRequest memory) { - BridgehubL2TransactionRequest memory bhL2TxnRequest; - - bhL2TxnRequest.sender = makeAddr("BH_L2_REQUEST_SENDER"); - bhL2TxnRequest.contractL2 = makeAddr("BH_L2_REQUEST_CONTRACT"); - bhL2TxnRequest.mintValue = block.timestamp; - bhL2TxnRequest.l2Value = block.timestamp * 2; - bhL2TxnRequest.l2Calldata = abi.encode("mock L2 Calldata"); - bhL2TxnRequest.l2GasLimit = block.timestamp * 3; - bhL2TxnRequest.l2GasPerPubdataByteLimit = block.timestamp * 4; - bhL2TxnRequest.factoryDeps = mockFactoryDepsBH; - bhL2TxnRequest.refundRecipient = makeAddr("BH_L2_REQUEST_REFUND_RECIPIENT"); - - return bhL2TxnRequest; - } - - function 
_restrictArraySize(bytes[] memory longArray, uint256 newSize) internal pure returns (bytes[] memory) { - bytes[] memory shortArray = new bytes[](newSize); - - for (uint256 i; i < newSize; i++) { - shortArray[i] = longArray[i]; - } - - return shortArray; - } - - ///////////////////////////////////////////////////////// - // OLDER (HIGH-LEVEL MOCKED) TESTS - //////////////////////////////////////////////////////// - - function test_proveL2MessageInclusion_old( - uint256 mockChainId, - uint256 mockBatchNumber, - uint256 mockIndex, - bytes32[] memory mockProof, - uint16 randomTxNumInBatch, - address randomSender, - bytes memory randomData - ) public { - vm.startPrank(bridgeOwner); - bridgeHub.addStateTransitionManager(address(mockSTM)); - vm.stopPrank(); - - L2Message memory l2Message = _createMockL2Message(randomTxNumInBatch, randomSender, randomData); - - vm.mockCall( - address(bridgeHub), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - bridgeHub.proveL2MessageInclusion.selector, - mockChainId, - mockBatchNumber, - mockIndex, - l2Message, - mockProof - ), - abi.encode(true) - ); - - assertTrue( - bridgeHub.proveL2MessageInclusion({ - _chainId: mockChainId, - _batchNumber: mockBatchNumber, - _index: mockIndex, - _message: l2Message, - _proof: mockProof - }) - ); - } - - function test_proveL2LogInclusion_old( - uint256 mockChainId, - uint256 mockBatchNumber, - uint256 mockIndex, - bytes32[] memory mockProof, - uint8 randomL2ShardId, - bool randomIsService, - uint16 randomTxNumInBatch, - address randomSender, - bytes32 randomKey, - bytes32 randomValue - ) public { - vm.startPrank(bridgeOwner); - bridgeHub.addStateTransitionManager(address(mockSTM)); - vm.stopPrank(); - - L2Log memory l2Log = _createMockL2Log({ - randomL2ShardId: randomL2ShardId, - randomIsService: randomIsService, - randomTxNumInBatch: randomTxNumInBatch, - randomSender: randomSender, - randomKey: randomKey, - randomValue: randomValue - }); - - vm.mockCall( - 
address(bridgeHub), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - bridgeHub.proveL2LogInclusion.selector, - mockChainId, - mockBatchNumber, - mockIndex, - l2Log, - mockProof - ), - abi.encode(true) - ); - - assertTrue( - bridgeHub.proveL2LogInclusion({ - _chainId: mockChainId, - _batchNumber: mockBatchNumber, - _index: mockIndex, - _log: l2Log, - _proof: mockProof - }) - ); - } - - function test_proveL1ToL2TransactionStatus_old( - uint256 randomChainId, - bytes32 randomL2TxHash, - uint256 randomL2BatchNumber, - uint256 randomL2MessageIndex, - uint16 randomL2TxNumberInBatch, - bytes32[] memory randomMerkleProof, - bool randomResultantBool - ) public { - vm.startPrank(bridgeOwner); - bridgeHub.addStateTransitionManager(address(mockSTM)); - vm.stopPrank(); - - TxStatus txStatus; - - if (randomChainId % 2 == 0) { - txStatus = TxStatus.Failure; - } else { - txStatus = TxStatus.Success; - } - - vm.mockCall( - address(bridgeHub), - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - bridgeHub.proveL1ToL2TransactionStatus.selector, - randomChainId, - randomL2TxHash, - randomL2BatchNumber, - randomL2MessageIndex, - randomL2TxNumberInBatch, - randomMerkleProof, - txStatus - ), - abi.encode(randomResultantBool) - ); - - assertTrue( - bridgeHub.proveL1ToL2TransactionStatus({ - _chainId: randomChainId, - _l2TxHash: randomL2TxHash, - _l2BatchNumber: randomL2BatchNumber, - _l2MessageIndex: randomL2MessageIndex, - _l2TxNumberInBatch: randomL2TxNumberInBatch, - _merkleProof: randomMerkleProof, - _status: txStatus - }) == randomResultantBool - ); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/ClaimFailedDeposit.t.sol b/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/ClaimFailedDeposit.t.sol deleted file mode 100644 index 89a20d90d..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1Erc20Bridge/ClaimFailedDeposit.t.sol +++ /dev/null @@ -1,62 +0,0 @@ -// 
SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {L1Erc20BridgeTest} from "./_L1Erc20Bridge_Shared.t.sol"; -import {StdStorage, stdStorage} from "forge-std/Test.sol"; - -contract ClaimFailedDepositTest is L1Erc20BridgeTest { - using stdStorage for StdStorage; - - event ClaimedFailedDeposit(address indexed to, address indexed l1Token, uint256 amount); - - function test_RevertWhen_ClaimAmountIsZero() public { - vm.expectRevert(bytes("2T")); - bytes32[] memory merkleProof; - - bridge.claimFailedDeposit({ - _depositSender: randomSigner, - _l1Token: address(token), - _l2TxHash: dummyL2DepositTxHash, - _l2BatchNumber: 0, - _l2MessageIndex: 0, - _l2TxNumberInBatch: 0, - _merkleProof: merkleProof - }); - } - - function test_claimFailedDepositSuccessfully() public { - uint256 depositedAmountBefore = bridge.depositAmount(alice, address(token), dummyL2DepositTxHash); - assertEq(depositedAmountBefore, 0); - - uint256 amount = 16; - stdstore - .target(address(bridge)) - .sig("depositAmount(address,address,bytes32)") - .with_key(alice) - .with_key(address(token)) - .with_key(dummyL2DepositTxHash) - .checked_write(amount); - - uint256 depositedAmountAfterDeposit = bridge.depositAmount(alice, address(token), dummyL2DepositTxHash); - assertEq(depositedAmountAfterDeposit, amount); - - vm.prank(alice); - // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(bridge)); - emit ClaimedFailedDeposit(alice, address(token), amount); - bytes32[] memory merkleProof; - bridge.claimFailedDeposit({ - _depositSender: alice, - _l1Token: address(token), - _l2TxHash: dummyL2DepositTxHash, - _l2BatchNumber: 0, - _l2MessageIndex: 0, - _l2TxNumberInBatch: 0, - _merkleProof: merkleProof - }); - - uint256 depositedAmountAfterWithdrawal = bridge.depositAmount(alice, address(token), dummyL2DepositTxHash); - assertEq(depositedAmountAfterWithdrawal, 0); - } -} diff --git 
a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeAdmin.t.sol b/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeAdmin.t.sol deleted file mode 100644 index af97e3ed2..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeAdmin.t.sol +++ /dev/null @@ -1,26 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {L1SharedBridgeTest} from "./_L1SharedBridge_Shared.t.sol"; - -/// We are testing all the specified revert and require cases. -contract L1SharedBridgeAdminTest is L1SharedBridgeTest { - uint256 internal randomChainId = 123456; - - function testAdminCanInitializeChainGovernance() public { - address randomL2Bridge = makeAddr("randomL2Bridge"); - - vm.prank(admin); - sharedBridge.initializeChainGovernance(randomChainId, randomL2Bridge); - - assertEq(sharedBridge.l2BridgeAddress(randomChainId), randomL2Bridge); - } - - function testAdminCanNotReinitializeChainGovernance() public { - address randomNewBridge = makeAddr("randomNewBridge"); - - vm.expectRevert("Ownable: caller is not the owner"); - vm.prank(admin); - sharedBridge.reinitializeChainGovernance(randomChainId, randomNewBridge); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeFails.t.sol b/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeFails.t.sol deleted file mode 100644 index 97bbe2ec2..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/L1SharedBridgeFails.t.sol +++ /dev/null @@ -1,620 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {L1SharedBridgeTest} from "./_L1SharedBridge_Shared.t.sol"; - -import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; -import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; - -import {L1SharedBridge} from "contracts/bridge/L1SharedBridge.sol"; 
-import {ETH_TOKEN_ADDRESS} from "contracts/common/Config.sol"; -import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; -import {L2Message, TxStatus} from "contracts/common/Messaging.sol"; -import {IMailbox} from "contracts/state-transition/chain-interfaces/IMailbox.sol"; -import {IL1ERC20Bridge} from "contracts/bridge/interfaces/IL1ERC20Bridge.sol"; -import {L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR} from "contracts/common/L2ContractAddresses.sol"; -import {IGetters} from "contracts/state-transition/chain-interfaces/IGetters.sol"; - -/// We are testing all the specified revert and require cases. -contract L1SharedBridgeFailTest is L1SharedBridgeTest { - function test_initialize_wrongOwner() public { - vm.expectRevert("ShB owner 0"); - new TransparentUpgradeableProxy( - address(sharedBridgeImpl), - proxyAdmin, - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector(L1SharedBridge.initialize.selector, address(0), eraPostUpgradeFirstBatch) - ); - } - - function test_bridgehubDepositBaseToken_EthwrongMsgValue() public { - vm.deal(bridgehubAddress, amount); - vm.prank(bridgehubAddress); - vm.expectRevert("L1SharedBridge: msg.value not equal to amount"); - sharedBridge.bridgehubDepositBaseToken(chainId, alice, ETH_TOKEN_ADDRESS, amount); - } - - function test_bridgehubDepositBaseToken_ErcWrongMsgValue() public { - vm.deal(bridgehubAddress, amount); - token.mint(alice, amount); - vm.prank(alice); - token.approve(address(sharedBridge), amount); - vm.prank(bridgehubAddress); - vm.expectRevert("ShB m.v > 0 b d.it"); - sharedBridge.bridgehubDepositBaseToken{value: amount}(chainId, alice, address(token), amount); - } - - function test_bridgehubDepositBaseToken_ErcWrongErcDepositAmount() public { - token.mint(alice, amount); - vm.prank(alice); - token.approve(address(sharedBridge), amount); - - vm.mockCall(address(token), abi.encodeWithSelector(IERC20.balanceOf.selector), abi.encode(10)); - - bytes memory message = bytes("3T"); - 
vm.expectRevert(message); - vm.prank(bridgehubAddress); - sharedBridge.bridgehubDepositBaseToken(chainId, alice, address(token), amount); - } - - function test_bridgehubDeposit_Eth_l2BridgeNotDeployed() public { - vm.prank(owner); - sharedBridge.reinitializeChainGovernance(chainId, address(0)); - vm.deal(bridgehubAddress, amount); - vm.prank(bridgehubAddress); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); - vm.expectRevert("ShB l2 bridge not deployed"); - // solhint-disable-next-line func-named-parameters - sharedBridge.bridgehubDeposit{value: amount}(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, 0, bob)); - } - - function test_bridgehubDeposit_Erc_weth() public { - vm.prank(bridgehubAddress); - vm.expectRevert("ShB: WETH deposit not supported"); - // solhint-disable-next-line func-named-parameters - sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(l1WethAddress, amount, bob)); - } - - function test_bridgehubDeposit_Eth_baseToken() public { - vm.prank(bridgehubAddress); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - vm.expectRevert("ShB: baseToken deposit not supported"); - // solhint-disable-next-line func-named-parameters - sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, 0, bob)); - } - - function test_bridgehubDeposit_Eth_wrongDepositAmount() public { - token.mint(alice, amount); - vm.prank(alice); - token.approve(address(sharedBridge), amount); - vm.prank(bridgehubAddress); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); - vm.expectRevert("ShB wrong withdraw amount"); - // solhint-disable-next-line func-named-parameters - sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, amount, bob)); - } - - function test_bridgehubDeposit_Erc_msgValue() public { - 
vm.deal(bridgehubAddress, amount); - token.mint(alice, amount); - vm.prank(alice); - token.approve(address(sharedBridge), amount); - vm.prank(bridgehubAddress); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - vm.expectRevert("ShB m.v > 0 for BH d.it 2"); - // solhint-disable-next-line func-named-parameters - sharedBridge.bridgehubDeposit{value: amount}(chainId, alice, 0, abi.encode(address(token), amount, bob)); - } - - function test_bridgehubDeposit_Erc_wrongDepositAmount() public { - token.mint(alice, amount); - vm.prank(alice); - token.approve(address(sharedBridge), amount); - vm.prank(bridgehubAddress); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - vm.mockCall(address(token), abi.encodeWithSelector(IERC20.balanceOf.selector), abi.encode(10)); - bytes memory message = bytes("5T"); - vm.expectRevert(message); - // solhint-disable-next-line func-named-parameters - sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(address(token), amount, bob)); - } - - function test_bridgehubDeposit_Eth() public { - vm.prank(bridgehubAddress); - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(address(token)) - ); - bytes memory message = bytes("6T"); - vm.expectRevert(message); - // solhint-disable-next-line func-named-parameters - sharedBridge.bridgehubDeposit(chainId, alice, 0, abi.encode(ETH_TOKEN_ADDRESS, 0, bob)); - } - - function test_bridgehubConfirmL2Transaction_depositAlreadyHappened() public { - bytes32 txDataHash = keccak256(abi.encode(alice, address(token), amount)); - _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); - vm.prank(bridgehubAddress); - vm.expectRevert("ShB tx hap"); - sharedBridge.bridgehubConfirmL2Transaction(chainId, txDataHash, txHash); - } - - function test_claimFailedDeposit_proofInvalid() public { - 
vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.proveL1ToL2TransactionStatus.selector), - abi.encode(address(0)) - ); - vm.prank(bridgehubAddress); - bytes memory message = bytes("yn"); - vm.expectRevert(message); - sharedBridge.claimFailedDeposit({ - _chainId: chainId, - _depositSender: alice, - _l1Token: ETH_TOKEN_ADDRESS, - _amount: amount, - _l2TxHash: txHash, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _merkleProof: merkleProof - }); - } - - function test_claimFailedDeposit_amountZero() public { - vm.deal(address(sharedBridge), amount); - - vm.mockCall( - bridgehubAddress, - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - IBridgehub.proveL1ToL2TransactionStatus.selector, - chainId, - txHash, - l2BatchNumber, - l2MessageIndex, - l2TxNumberInBatch, - merkleProof, - TxStatus.Failure - ), - abi.encode(true) - ); - - bytes memory message = bytes("y1"); - vm.expectRevert(message); - sharedBridge.claimFailedDeposit({ - _chainId: chainId, - _depositSender: alice, - _l1Token: ETH_TOKEN_ADDRESS, - _amount: 0, - _l2TxHash: txHash, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _merkleProof: merkleProof - }); - } - - function test_claimFailedDeposit_depositDidNotHappen() public { - vm.deal(address(sharedBridge), amount); - - vm.mockCall( - bridgehubAddress, - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - IBridgehub.proveL1ToL2TransactionStatus.selector, - chainId, - txHash, - l2BatchNumber, - l2MessageIndex, - l2TxNumberInBatch, - merkleProof, - TxStatus.Failure - ), - abi.encode(true) - ); - - vm.expectRevert("ShB: d.it not hap"); - sharedBridge.claimFailedDeposit({ - _chainId: chainId, - _depositSender: alice, - _l1Token: ETH_TOKEN_ADDRESS, - _amount: amount, - _l2TxHash: txHash, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - 
_l2TxNumberInBatch: l2TxNumberInBatch, - _merkleProof: merkleProof - }); - } - - function test_claimFailedDeposit_chainBalanceLow() public { - vm.deal(address(sharedBridge), amount); - - bytes32 txDataHash = keccak256(abi.encode(alice, ETH_TOKEN_ADDRESS, amount)); - _setSharedBridgeDepositHappened(chainId, txHash, txDataHash); - require(sharedBridge.depositHappened(chainId, txHash) == txDataHash, "Deposit not set"); - - vm.mockCall( - bridgehubAddress, - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - IBridgehub.proveL1ToL2TransactionStatus.selector, - chainId, - txHash, - l2BatchNumber, - l2MessageIndex, - l2TxNumberInBatch, - merkleProof, - TxStatus.Failure - ), - abi.encode(true) - ); - - vm.expectRevert("ShB n funds"); - sharedBridge.claimFailedDeposit({ - _chainId: chainId, - _depositSender: alice, - _l1Token: ETH_TOKEN_ADDRESS, - _amount: amount, - _l2TxHash: txHash, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _merkleProof: merkleProof - }); - } - - function test_finalizeWithdrawal_EthOnEth_LegacyTxFinalizedInERC20Bridge() public { - vm.deal(address(sharedBridge), amount); - uint256 legacyBatchNumber = 0; - - vm.mockCall( - l1ERC20BridgeAddress, - abi.encodeWithSelector(IL1ERC20Bridge.isWithdrawalFinalized.selector), - abi.encode(true) - ); - - bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount - ); - - vm.expectRevert("ShB: legacy withdrawal"); - sharedBridge.finalizeWithdrawal({ - _chainId: eraChainId, - _l2BatchNumber: legacyBatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_finalizeWithdrawal_EthOnEth_LegacyTxFinalizedInSharedBridge() public { - vm.deal(address(sharedBridge), amount); - uint256 legacyBatchNumber = 0; - - vm.mockCall( - l1ERC20BridgeAddress, - 
abi.encodeWithSelector(IL1ERC20Bridge.isWithdrawalFinalized.selector), - abi.encode(false) - ); - - vm.store( - address(sharedBridge), - keccak256( - abi.encode( - l2MessageIndex, - keccak256( - abi.encode( - legacyBatchNumber, - keccak256(abi.encode(eraChainId, isWithdrawalFinalizedStorageLocation)) - ) - ) - ) - ), - bytes32(uint256(1)) - ); - - bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount - ); - - vm.expectRevert("Withdrawal is already finalized"); - sharedBridge.finalizeWithdrawal({ - _chainId: eraChainId, - _l2BatchNumber: legacyBatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_finalizeWithdrawal_EthOnEth_LegacyTxFinalizedInDiamondProxy() public { - vm.deal(address(sharedBridge), amount); - uint256 legacyBatchNumber = 0; - - vm.mockCall( - l1ERC20BridgeAddress, - abi.encodeWithSelector(IL1ERC20Bridge.isWithdrawalFinalized.selector), - abi.encode(false) - ); - - vm.mockCall( - eraDiamondProxy, - abi.encodeWithSelector(IGetters.isEthWithdrawalFinalized.selector), - abi.encode(true) - ); - - bytes memory message = abi.encodePacked( - IL1ERC20Bridge.finalizeWithdrawal.selector, - alice, - address(token), - amount - ); - vm.expectRevert("Withdrawal is already finalized 2"); - - sharedBridge.finalizeWithdrawal({ - _chainId: eraChainId, - _l2BatchNumber: legacyBatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_finalizeWithdrawal_chainBalance() public { - vm.deal(address(sharedBridge), amount); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - - bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); - L2Message memory l2ToL1Message = 
L2Message({ - txNumberInBatch: l2TxNumberInBatch, - sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, - data: message - }); - - vm.mockCall( - bridgehubAddress, - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - IBridgehub.proveL2MessageInclusion.selector, - chainId, - l2BatchNumber, - l2MessageIndex, - l2ToL1Message, - merkleProof - ), - abi.encode(true) - ); - - vm.expectRevert("ShB not enough funds 2"); - - sharedBridge.finalizeWithdrawal({ - _chainId: chainId, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_checkWithdrawal_wrongProof() public { - vm.deal(address(sharedBridge), amount); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - - bytes memory message = abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector, alice, amount); - L2Message memory l2ToL1Message = L2Message({ - txNumberInBatch: l2TxNumberInBatch, - sender: L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR, - data: message - }); - - vm.mockCall( - bridgehubAddress, - // solhint-disable-next-line func-named-parameters - abi.encodeWithSelector( - IBridgehub.proveL2MessageInclusion.selector, - chainId, - l2BatchNumber, - l2MessageIndex, - l2ToL1Message, - merkleProof - ), - abi.encode(false) - ); - - vm.expectRevert("ShB withd w proof"); - - sharedBridge.finalizeWithdrawal({ - _chainId: chainId, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_parseL2WithdrawalMessage_WrongMsgLength() public { - vm.deal(address(sharedBridge), amount); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - - bytes memory message = 
abi.encodePacked(IMailbox.finalizeEthWithdrawal.selector); - - vm.expectRevert("ShB wrong msg len"); - sharedBridge.finalizeWithdrawal({ - _chainId: chainId, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_parseL2WithdrawalMessage_WrongMsgLength2() public { - vm.deal(address(sharedBridge), amount); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector, alice, amount), - abi.encode(ETH_TOKEN_ADDRESS) - ); - - bytes memory message = abi.encodePacked(IL1ERC20Bridge.finalizeWithdrawal.selector, alice, amount); - // should have more data here - - vm.expectRevert("ShB wrong msg len 2"); - - sharedBridge.finalizeWithdrawal({ - _chainId: eraChainId, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_parseL2WithdrawalMessage_WrongSelector() public { - vm.deal(address(sharedBridge), amount); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.baseToken.selector), - abi.encode(ETH_TOKEN_ADDRESS) - ); - - // notice that the selector is wrong - bytes memory message = abi.encodePacked(IMailbox.proveL2LogInclusion.selector, alice, amount); - - vm.expectRevert("ShB Incorrect message function selector"); - sharedBridge.finalizeWithdrawal({ - _chainId: eraChainId, - _l2BatchNumber: l2BatchNumber, - _l2MessageIndex: l2MessageIndex, - _l2TxNumberInBatch: l2TxNumberInBatch, - _message: message, - _merkleProof: merkleProof - }); - } - - function test_depositLegacyERC20Bridge_l2BridgeNotDeployed() public { - uint256 l2TxGasLimit = 100000; - uint256 l2TxGasPerPubdataByte = 100; - address refundRecipient = address(0); - - vm.prank(owner); - sharedBridge.reinitializeChainGovernance(eraChainId, address(0)); - - vm.expectRevert("ShB b. 
n dep"); - vm.prank(l1ERC20BridgeAddress); - sharedBridge.depositLegacyErc20Bridge({ - _prevMsgSender: alice, - _l2Receiver: bob, - _l1Token: address(token), - _amount: amount, - _l2TxGasLimit: l2TxGasLimit, - _l2TxGasPerPubdataByte: l2TxGasPerPubdataByte, - _refundRecipient: refundRecipient - }); - } - - function test_depositLegacyERC20Bridge_weth() public { - uint256 l2TxGasLimit = 100000; - uint256 l2TxGasPerPubdataByte = 100; - address refundRecipient = address(0); - - vm.expectRevert("ShB: WETH deposit not supported 2"); - vm.prank(l1ERC20BridgeAddress); - sharedBridge.depositLegacyErc20Bridge({ - _prevMsgSender: alice, - _l2Receiver: bob, - _l1Token: l1WethAddress, - _amount: amount, - _l2TxGasLimit: l2TxGasLimit, - _l2TxGasPerPubdataByte: l2TxGasPerPubdataByte, - _refundRecipient: refundRecipient - }); - } - - function test_depositLegacyERC20Bridge_refundRecipient() public { - uint256 l2TxGasLimit = 100000; - uint256 l2TxGasPerPubdataByte = 100; - - // solhint-disable-next-line func-named-parameters - vm.expectEmit(true, true, true, true, address(sharedBridge)); - - emit LegacyDepositInitiated({ - chainId: eraChainId, - l2DepositTxHash: txHash, - from: alice, - to: bob, - l1Token: address(token), - amount: amount - }); - - vm.mockCall( - bridgehubAddress, - abi.encodeWithSelector(IBridgehub.requestL2TransactionDirect.selector), - abi.encode(txHash) - ); - - vm.prank(l1ERC20BridgeAddress); - sharedBridge.depositLegacyErc20Bridge({ - _prevMsgSender: alice, - _l2Receiver: bob, - _l1Token: address(token), - _amount: amount, - _l2TxGasLimit: l2TxGasLimit, - _l2TxGasPerPubdataByte: l2TxGasPerPubdataByte, - _refundRecipient: address(1) - }); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/_L1SharedBridge_Shared.t.sol b/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/_L1SharedBridge_Shared.t.sol deleted file mode 100644 index 4554e4a36..000000000 --- 
a/l1-contracts/test/foundry/unit/concrete/Bridges/L1SharedBridge/_L1SharedBridge_Shared.t.sol +++ /dev/null @@ -1,163 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {StdStorage, stdStorage} from "forge-std/Test.sol"; -import {Test} from "forge-std/Test.sol"; - -import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; - -import {L1SharedBridge} from "contracts/bridge/L1SharedBridge.sol"; -import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; -import {TestnetERC20Token} from "contracts/dev-contracts/TestnetERC20Token.sol"; - -contract L1SharedBridgeTest is Test { - using stdStorage for StdStorage; - - event BridgehubDepositBaseTokenInitiated( - uint256 indexed chainId, - address indexed from, - address l1Token, - uint256 amount - ); - - event BridgehubDepositInitiated( - uint256 indexed chainId, - bytes32 indexed txDataHash, - address indexed from, - address to, - address l1Token, - uint256 amount - ); - - event BridgehubDepositFinalized( - uint256 indexed chainId, - bytes32 indexed txDataHash, - bytes32 indexed l2DepositTxHash - ); - - event WithdrawalFinalizedSharedBridge( - uint256 indexed chainId, - address indexed to, - address indexed l1Token, - uint256 amount - ); - - event ClaimedFailedDepositSharedBridge( - uint256 indexed chainId, - address indexed to, - address indexed l1Token, - uint256 amount - ); - - event LegacyDepositInitiated( - uint256 indexed chainId, - bytes32 indexed l2DepositTxHash, - address indexed from, - address to, - address l1Token, - uint256 amount - ); - - L1SharedBridge sharedBridgeImpl; - L1SharedBridge sharedBridge; - address bridgehubAddress; - address l1ERC20BridgeAddress; - address l1WethAddress; - address l2SharedBridge; - TestnetERC20Token token; - uint256 eraPostUpgradeFirstBatch; - - address owner; - address admin; - address proxyAdmin; - address zkSync; - address alice; - address bob; - uint256 chainId; - uint256 amount = 100; - 
bytes32 txHash; - - uint256 eraChainId; - address eraDiamondProxy; - address eraErc20BridgeAddress; - - uint256 l2BatchNumber; - uint256 l2MessageIndex; - uint16 l2TxNumberInBatch; - bytes32[] merkleProof; - - uint256 isWithdrawalFinalizedStorageLocation = uint256(8 - 1 + (1 + 49) + 0 + (1 + 49) + 50 + 1 + 50); - - function setUp() public { - owner = makeAddr("owner"); - admin = makeAddr("admin"); - proxyAdmin = makeAddr("proxyAdmin"); - // zkSync = makeAddr("zkSync"); - bridgehubAddress = makeAddr("bridgehub"); - alice = makeAddr("alice"); - // bob = makeAddr("bob"); - l1WethAddress = makeAddr("weth"); - l1ERC20BridgeAddress = makeAddr("l1ERC20Bridge"); - l2SharedBridge = makeAddr("l2SharedBridge"); - - txHash = bytes32(uint256(uint160(makeAddr("txHash")))); - l2BatchNumber = uint256(uint160(makeAddr("l2BatchNumber"))); - l2MessageIndex = uint256(uint160(makeAddr("l2MessageIndex"))); - l2TxNumberInBatch = uint16(uint160(makeAddr("l2TxNumberInBatch"))); - merkleProof = new bytes32[](1); - eraPostUpgradeFirstBatch = 1; - - chainId = 1; - eraChainId = 9; - eraDiamondProxy = makeAddr("eraDiamondProxy"); - eraErc20BridgeAddress = makeAddr("eraErc20BridgeAddress"); - - token = new TestnetERC20Token("TestnetERC20Token", "TET", 18); - sharedBridgeImpl = new L1SharedBridge({ - _l1WethAddress: l1WethAddress, - _bridgehub: IBridgehub(bridgehubAddress), - _eraChainId: eraChainId, - _eraDiamondProxy: eraDiamondProxy - }); - TransparentUpgradeableProxy sharedBridgeProxy = new TransparentUpgradeableProxy( - address(sharedBridgeImpl), - proxyAdmin, - abi.encodeWithSelector(L1SharedBridge.initialize.selector, owner) - ); - sharedBridge = L1SharedBridge(payable(sharedBridgeProxy)); - vm.prank(owner); - sharedBridge.setL1Erc20Bridge(l1ERC20BridgeAddress); - vm.prank(owner); - sharedBridge.setEraPostDiamondUpgradeFirstBatch(eraPostUpgradeFirstBatch); - vm.prank(owner); - sharedBridge.setEraPostLegacyBridgeUpgradeFirstBatch(eraPostUpgradeFirstBatch); - vm.prank(owner); - 
sharedBridge.setEraLegacyBridgeLastDepositTime(1, 0); - vm.prank(owner); - sharedBridge.initializeChainGovernance(chainId, l2SharedBridge); - vm.prank(owner); - sharedBridge.initializeChainGovernance(eraChainId, l2SharedBridge); - vm.prank(owner); - sharedBridge.setPendingAdmin(admin); - vm.prank(admin); - sharedBridge.acceptAdmin(); - } - - function _setSharedBridgeDepositHappened(uint256 _chainId, bytes32 _txHash, bytes32 _txDataHash) internal { - stdstore - .target(address(sharedBridge)) - .sig(sharedBridge.depositHappened.selector) - .with_key(_chainId) - .with_key(_txHash) - .checked_write(_txDataHash); - } - - function _setSharedBridgeChainBalance(uint256 _chainId, address _token, uint256 _value) internal { - stdstore - .target(address(sharedBridge)) - .sig(sharedBridge.chainBalance.selector) - .with_key(_chainId) - .with_key(_token) - .checked_write(_value); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/Executor/Committing.t.sol b/l1-contracts/test/foundry/unit/concrete/Executor/Committing.t.sol deleted file mode 100644 index ce3e5947c..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Executor/Committing.t.sol +++ /dev/null @@ -1,827 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {Vm} from "forge-std/Test.sol"; -import {Utils, L2_BOOTLOADER_ADDRESS, L2_SYSTEM_CONTEXT_ADDRESS} from "../Utils/Utils.sol"; -import {ExecutorTest} from "./_Executor_Shared.t.sol"; - -import {IExecutor, MAX_NUMBER_OF_BLOBS} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; -import {SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; -import {POINT_EVALUATION_PRECOMPILE_ADDR} from "contracts/common/Config.sol"; -import {L2_PUBDATA_CHUNK_PUBLISHER_ADDR} from "contracts/common/L2ContractAddresses.sol"; - -contract CommittingTest is ExecutorTest { - function test_RevertWhen_CommittingWithWrongLastCommittedBatchData() public { - IExecutor.CommitBatchInfo[] memory newCommitBatchInfoArray = new 
IExecutor.CommitBatchInfo[](1); - newCommitBatchInfoArray[0] = newCommitBatchInfo; - - IExecutor.StoredBatchInfo memory wrongGenesisStoredBatchInfo = genesisStoredBatchInfo; - wrongGenesisStoredBatchInfo.timestamp = 1000; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("i")); - executor.commitBatches(wrongGenesisStoredBatchInfo, newCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithWrongOrderOfBatches() public { - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.batchNumber = 2; // wrong batch number - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("f")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithWrongNewBatchTimestamp() public { - bytes32 wrongNewBatchTimestamp = Utils.randomBytes32("wrongNewBatchTimestamp"); - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - - wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - wrongNewBatchTimestamp - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("tb")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithTooSmallNewBatchTimestamp() public { - uint256 wrongNewBatchTimestamp = 1; - bytes[] memory wrongL2Logs = 
Utils.createSystemLogs(); - wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(1, 1) - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - wrongNewCommitBatchInfo.timestamp = uint64(wrongNewBatchTimestamp); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("h1")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingTooBigLastL2BatchTimestamp() public { - uint64 wrongNewBatchTimestamp = 0xffffffff; - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(wrongNewBatchTimestamp, wrongNewBatchTimestamp) - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - wrongNewCommitBatchInfo.timestamp = wrongNewBatchTimestamp; - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("h2")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithWrongPreviousBatchHash() public { - bytes32 wrongPreviousBatchHash = 
Utils.randomBytes32("wrongPreviousBatchHash"); - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - wrongL2Logs[uint256(uint256(SystemLogKey.PREV_BATCH_HASH_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PREV_BATCH_HASH_KEY), - wrongPreviousBatchHash - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("l")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithoutProcessingSystemContextLog() public { - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - delete wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))]; - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("b7")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithProcessingSystemContextLogTwice() public { - bytes[] memory l2Logs = Utils.createSystemLogs(); - - bytes memory wrongL2Logs = abi.encodePacked( - Utils.encodePacked(l2Logs), - // solhint-disable-next-line func-named-parameters - Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - bytes32("") - ) - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - 
wrongNewCommitBatchInfo.systemLogs = wrongL2Logs; - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("kp")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_UnexpectedL2ToL1Log() public { - address unexpectedAddress = address(0); - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - wrongL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( - true, - unexpectedAddress, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - bytes32("") - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("sc")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithWrongCanonicalTxHash() public { - bytes32 wrongChainedPriorityHash = Utils.randomBytes32("canonicalTxHash"); - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - wrongL2Logs[uint256(uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY))] = Utils.constructL2Log( - true, - L2_BOOTLOADER_ADDRESS, - uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY), - wrongChainedPriorityHash - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - 
- vm.expectRevert(bytes.concat("t")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithWrongNumberOfLayer1txs() public { - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - wrongL2Logs[uint256(uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY))] = Utils.constructL2Log( - true, - L2_BOOTLOADER_ADDRESS, - uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY), - bytes32(bytes1(0x01)) - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - wrongNewCommitBatchInfo.numberOfLayer1Txs = 2; - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("ta")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_CommittingWithUnknownSystemLogKey() public { - bytes[] memory l2Logs = Utils.createSystemLogs(); - bytes memory wrongL2Logs = abi.encodePacked( - Utils.encodePacked(l2Logs), - // solhint-disable-next-line func-named-parameters - abi.encodePacked(bytes2(0x0001), bytes2(0x0000), L2_SYSTEM_CONTEXT_ADDRESS, uint256(119), bytes32("")) - ); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = abi.encodePacked(bytes4(0x00000008), wrongL2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("ul")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - - function test_RevertWhen_SystemLogIsFromIncorrectAddress() public { - bytes32[7] memory values = [ - bytes32(""), - 
bytes32(0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563), - bytes32(""), - bytes32(""), - bytes32(""), - keccak256(""), - bytes32("") - ]; - - bytes[7] memory errors = [ - bytes.concat("lm"), - bytes.concat("ln"), - bytes.concat("lb"), - bytes.concat("sc"), - bytes.concat("sv"), - bytes.concat("bl"), - bytes.concat("bk") - ]; - - for (uint256 i = 0; i < values.length; i++) { - bytes[] memory wrongL2Logs = Utils.createSystemLogs(); - address wrongAddress = makeAddr("randomAddress"); - wrongL2Logs[i] = Utils.constructL2Log(true, wrongAddress, i, values[i]); - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(wrongL2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(errors[i]); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - } - - function test_RevertWhen_SystemLogIsMissing() public { - for (uint256 i = 0; i < 7; i++) { - bytes[] memory l2Logs = Utils.createSystemLogs(); - delete l2Logs[i]; - - IExecutor.CommitBatchInfo memory wrongNewCommitBatchInfo = newCommitBatchInfo; - wrongNewCommitBatchInfo.systemLogs = Utils.encodePacked(l2Logs); - - IExecutor.CommitBatchInfo[] memory wrongNewCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - wrongNewCommitBatchInfoArray[0] = wrongNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("b7")); - executor.commitBatches(genesisStoredBatchInfo, wrongNewCommitBatchInfoArray); - } - } - - function test_SuccessfullyCommitBatch() public { - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - 
uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563 - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - correctNewCommitBatchInfo.pubdataCommitments = abi.encodePacked( - "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", - bytes32(uint256(0xbeef)) - ); - - bytes32[] memory blobHashes = new bytes32[](MAX_NUMBER_OF_BLOBS); - blobHashes[0] = 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563; - - bytes32[] memory blobCommitments = new bytes32[](MAX_NUMBER_OF_BLOBS); - blobCommitments[0] = bytes32(uint256(0xbeef)); - - bytes32 expectedBatchCommitment = Utils.createBatchCommitment( - correctNewCommitBatchInfo, - bytes32(""), - blobCommitments, - blobHashes - ); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - - vm.prank(validator); - - vm.recordLogs(); - - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - - Vm.Log[] memory entries = vm.getRecordedLogs(); - - assertEq(entries.length, 1); - assertEq(entries[0].topics[0], keccak256("BlockCommit(uint256,bytes32,bytes32)")); - assertEq(entries[0].topics[1], bytes32(uint256(1))); // batchNumber - assertEq(entries[0].topics[2], correctNewCommitBatchInfo.newStateRoot); // batchHash - assertEq(entries[0].topics[3], expectedBatchCommitment); // commitment - - uint256 totalBatchesCommitted = getters.getTotalBatchesCommitted(); - 
assertEq(totalBatchesCommitted, 1); - } - - function test_SuccessfullyCommitBatchWithOneBlob() public { - bytes - memory pubdataCommitment = "\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - bytes32 versionedHash1 = 0xf39a869f62e75cf5f0bf914688a6b289caf2049435d8e68c5c5e6d05e44913f3; - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(1)), abi.encode(bytes32(0))); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - "\xf3\x9a\x86\x9f\x62\xe7\x5c\xf5\xf0\xbf\x91\x46\x88\xa6\xb2\x89\xca\xf2\x04\x94\x35\xd8\xe6\x8c\x5c\x5e\x6d\x05\xe4\x49\x13\xf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - versionedHash1 - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.recordLogs(); - - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - - Vm.Log[] memory entries = vm.getRecordedLogs(); - - assertEq(entries.length, 1); - assertEq(entries[0].topics[0], keccak256("BlockCommit(uint256,bytes32,bytes32)")); - assertEq(entries[0].topics[1], bytes32(uint256(1))); // batchNumber - - uint256 totalBatchesCommitted = getters.getTotalBatchesCommitted(); - assertEq(totalBatchesCommitted, 1); - - vm.clearMockedCalls(); - } - - function test_SuccessfullyCommitBatchWithTwoBlob() public { - bytes - memory pubdataCommitment = 
"\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - bytes32 versionedHash1 = 0xf39a869f62e75cf5f0bf914688a6b289caf2049435d8e68c5c5e6d05e44913f3; - bytes32 versionedHash2 = 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563; - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(1)), abi.encode(versionedHash2)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(2)), abi.encode(bytes32(0))); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - 
"\xf3\x9a\x86\x9f\x62\xe7\x5c\xf5\xf0\xbf\x91\x46\x88\xa6\xb2\x89\xca\xf2\x04\x94\x35\xd8\xe6\x8c\x5c\x5e\x6d\x05\xe4\x49\x13\xf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - "\x29\x0d\xec\xd9\x54\x8b\x62\xa8\xd6\x03\x45\xa9\x88\x38\x6f\xc8\x4b\xa6\xbc\x95\x48\x40\x08\xf6\x36\x2f\x93\x16\x0e\xf3\xe5\x63\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - versionedHash1 - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_TWO_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_TWO_HASH_KEY), - versionedHash2 - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.recordLogs(); - - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - - Vm.Log[] memory entries = vm.getRecordedLogs(); - - assertEq(entries.length, 1); - assertEq(entries[0].topics[0], keccak256("BlockCommit(uint256,bytes32,bytes32)")); - assertEq(entries[0].topics[1], bytes32(uint256(1))); // batchNumber - - uint256 totalBatchesCommitted = getters.getTotalBatchesCommitted(); - assertEq(totalBatchesCommitted, 1); - - vm.clearMockedCalls(); - } - - function test_RevertWhen_CommittingBatchMoreThanOneBatch() public { - 
IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](2); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[1] = correctNewCommitBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes("e4")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - } - - function test_RevertWhen_EmptyPubdataCommitments() public { - bytes memory pubdataCommitment = "\x01"; - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.expectRevert(bytes("pl")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - } - - function test_RevertWhen_PartialPubdataCommitment() public { - bytes - memory pubdataCommitment = 
"\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57"; - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.expectRevert(bytes("bs")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - } - - function test_RevertWhen_TooManyPubdataCommitments() public { - bytes - memory pubdataCommitment = 
"\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x0
8\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - 
IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.expectRevert(bytes("bd")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - } - - function test_RevertWhen_NotEnoughPubdataCommitments() public { - bytes - memory pubdataCommitment = "\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - bytes32 versionedHash1 = 0xf39a869f62e75cf5f0bf914688a6b289caf2049435d8e68c5c5e6d05e44913f3; - bytes32 versionedHash2 = 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563; - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(1)), abi.encode(versionedHash2)); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - 
"\xf3\x9a\x86\x9f\x62\xe7\x5c\xf5\xf0\xbf\x91\x46\x88\xa6\xb2\x89\xca\xf2\x04\x94\x35\xd8\xe6\x8c\x5c\x5e\x6d\x05\xe4\x49\x13\xf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - versionedHash1 - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - 
vm.prank(validator); - - vm.expectRevert(bytes("lh")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - - vm.clearMockedCalls(); - } - - function test_RevertWhen_BlobDoesNotExist() public { - bytes - memory pubdataCommitment = "\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - bytes32 versionedHash1 = 0xf39a869f62e75cf5f0bf914688a6b289caf2049435d8e68c5c5e6d05e44913f3; - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(bytes32(0))); - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - versionedHash1 - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); 
- - vm.expectRevert(bytes("vh")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - - vm.clearMockedCalls(); - } - - function test_RevertWhen_SecondBlobSentWithoutCommitmentData() public { - bytes - memory pubdataCommitment = "\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - bytes32 versionedHash1 = 0xf39a869f62e75cf5f0bf914688a6b289caf2049435d8e68c5c5e6d05e44913f3; - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(1)), abi.encode(versionedHash1)); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - "\xf3\x9a\x86\x9f\x62\xe7\x5c\xf5\xf0\xbf\x91\x46\x88\xa6\xb2\x89\xca\xf2\x04\x94\x35\xd8\xe6\x8c\x5c\x5e\x6d\x05\xe4\x49\x13\xf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - versionedHash1 - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.expectRevert(bytes("lh")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - - vm.clearMockedCalls(); - } - - function test_RevertWhen_SecondBlobLinearHashZeroWithCommitment() public { - bytes - memory pubdataCommitment = 
"\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - bytes32 versionedHash1 = 0xf39a869f62e75cf5f0bf914688a6b289caf2049435d8e68c5c5e6d05e44913f3; - bytes32 versionedHash2 = 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563; - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(1)), abi.encode(versionedHash2)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(2)), abi.encode(bytes32(0))); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - 
"\xf3\x9a\x86\x9f\x62\xe7\x5c\xf5\xf0\xbf\x91\x46\x88\xa6\xb2\x89\xca\xf2\x04\x94\x35\xd8\xe6\x8c\x5c\x5e\x6d\x05\xe4\x49\x13\xf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - "\x29\x0d\xec\xd9\x54\x8b\x62\xa8\xd6\x03\x45\xa9\x88\x38\x6f\xc8\x4b\xa6\xbc\x95\x48\x40\x08\xf6\x36\x2f\x93\x16\x0e\xf3\xe5\x63\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - versionedHash1 - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_TWO_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_TWO_HASH_KEY), - bytes32(0) - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.expectRevert(bytes("bh")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - } - - function test_RevertWhen_SecondBlobLinearHashNotZeroWithEmptyCommitment() public { - bytes - memory pubdataCommitment = 
"\x01\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2"; - bytes32 versionedHash1 = 0xf39a869f62e75cf5f0bf914688a6b289caf2049435d8e68c5c5e6d05e44913f3; - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(0)), abi.encode(versionedHash1)); - - vm.mockCall(blobVersionedHashRetriever, abi.encode(uint256(1)), abi.encode(bytes32(0))); - - vm.mockCall( - POINT_EVALUATION_PRECOMPILE_ADDR, - "\xf3\x9a\x86\x9f\x62\xe7\x5c\xf5\xf0\xbf\x91\x46\x88\xa6\xb2\x89\xca\xf2\x04\x94\x35\xd8\xe6\x8c\x5c\x5e\x6d\x05\xe4\x49\x13\xf3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x3d\x53\x8d\x91\xd4\x77\xb0\xf8\xf7\x7e\x19\x52\x48\x7f\x00\xb8\xdf\x41\xda\x90\x5c\x08\x75\xc5\xc9\x9b\xa1\x92\x26\x84\x0d\x0d\x0a\x25\x26\xee\x22\xc7\x96\x60\x65\x7c\xbe\x01\x95\x33\x5b\x44\x69\xbd\x92\x94\x6f\x7f\x74\xae\xc5\xce\xef\x31\xf4\x32\x53\xd4\x08\x96\x72\x65\xfa\x85\x5a\xc8\xa0\x0a\x19\x52\x93\x6e\x0f\xe9\x97\x01\xc0\xa4\x32\xa1\x32\x2c\x45\x67\x24\xf7\xad\xd8\xa5\xb4\x7a\x51\xda\x52\x17\x06\x06\x95\x34\x61\xab\xd7\x5b\x91\x49\xc7\xc7\x91\xf4\x07\xfd\xbc\xf8\x39\x53\x2c\xb1\x08\xe8\xa5\x00\x64\x40\xcf\x21\xbf\x68\x87\x20\x5a\xcf\x44\x3b\x66\x3a\x57\xf2", - "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x73\xed\xa7\x53\x29\x9d\x7d\x48\x33\x39\xd8\x08\x09\xa1\xd8\x05\x53\xbd\xa4\x02\xff\xfe\x5b\xfe\xff\xff\xff\xff\x00\x00\x00\x01" - ); - - bytes[] memory correctL2Logs = 
Utils.createSystemLogs(); - correctL2Logs[uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_ONE_HASH_KEY), - versionedHash1 - ); - - correctL2Logs[uint256(SystemLogKey.BLOB_TWO_HASH_KEY)] = Utils.constructL2Log( - true, - L2_PUBDATA_CHUNK_PUBLISHER_ADDR, - uint256(SystemLogKey.BLOB_TWO_HASH_KEY), - versionedHash1 - ); - - IExecutor.CommitBatchInfo memory correctNewCommitBatchInfo = newCommitBatchInfo; - correctNewCommitBatchInfo.systemLogs = Utils.encodePacked(correctL2Logs); - - IExecutor.CommitBatchInfo[] memory correctCommitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - correctCommitBatchInfoArray[0] = correctNewCommitBatchInfo; - correctCommitBatchInfoArray[0].pubdataCommitments = pubdataCommitment; - - vm.prank(validator); - - vm.expectRevert(bytes("bh")); - executor.commitBatches(genesisStoredBatchInfo, correctCommitBatchInfoArray); - - vm.clearMockedCalls(); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/Executor/Proving.t.sol b/l1-contracts/test/foundry/unit/concrete/Executor/Proving.t.sol deleted file mode 100644 index 184de78e2..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Executor/Proving.t.sol +++ /dev/null @@ -1,100 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {Vm} from "forge-std/Test.sol"; -import {Utils, L2_SYSTEM_CONTEXT_ADDRESS} from "../Utils/Utils.sol"; - -import {ExecutorTest} from "./_Executor_Shared.t.sol"; - -import {COMMIT_TIMESTAMP_NOT_OLDER} from "contracts/common/Config.sol"; -import {IExecutor, SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; - -contract ProvingTest is 
ExecutorTest { - function setUp() public { - vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1); - currentTimestamp = block.timestamp; - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - bytes memory l2Logs = Utils.encodePacked(correctL2Logs); - - newCommitBatchInfo.timestamp = uint64(currentTimestamp); - newCommitBatchInfo.systemLogs = l2Logs; - - IExecutor.CommitBatchInfo[] memory commitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - commitBatchInfoArray[0] = newCommitBatchInfo; - - vm.prank(validator); - vm.recordLogs(); - executor.commitBatches(genesisStoredBatchInfo, commitBatchInfoArray); - Vm.Log[] memory entries = vm.getRecordedLogs(); - - newStoredBatchInfo = IExecutor.StoredBatchInfo({ - batchNumber: 1, - batchHash: entries[0].topics[2], - indexRepeatedStorageChanges: 0, - numberOfLayer1Txs: 0, - priorityOperationsHash: keccak256(""), - l2LogsTreeRoot: 0, - timestamp: currentTimestamp, - commitment: entries[0].topics[3] - }); - } - - function test_RevertWhen_ProvingWithWrongPreviousBlockData() public { - IExecutor.StoredBatchInfo memory wrongPreviousStoredBatchInfo = genesisStoredBatchInfo; - wrongPreviousStoredBatchInfo.batchNumber = 10; // Correct is 0 - - IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); - storedBatchInfoArray[0] = newStoredBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("t1")); - executor.proveBatches(wrongPreviousStoredBatchInfo, storedBatchInfoArray, proofInput); - } - - function test_RevertWhen_ProvingWithWrongCommittedBlock() public { - IExecutor.StoredBatchInfo memory wrongNewStoredBatchInfo = newStoredBatchInfo; - wrongNewStoredBatchInfo.batchNumber = 10; // 
Correct is 1 - - IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); - storedBatchInfoArray[0] = wrongNewStoredBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("o1")); - executor.proveBatches(genesisStoredBatchInfo, storedBatchInfoArray, proofInput); - } - - function test_RevertWhen_ProvingRevertedBlockWithoutCommittingAgain() public { - vm.prank(validator); - executor.revertBatches(0); - - IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); - storedBatchInfoArray[0] = newStoredBatchInfo; - - vm.prank(validator); - - vm.expectRevert(bytes.concat("q")); - executor.proveBatches(genesisStoredBatchInfo, storedBatchInfoArray, proofInput); - } - - function test_SuccessfulProve() public { - IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); - storedBatchInfoArray[0] = newStoredBatchInfo; - - vm.prank(validator); - - executor.proveBatches(genesisStoredBatchInfo, storedBatchInfoArray, proofInput); - - uint256 totalBlocksVerified = getters.getTotalBlocksVerified(); - assertEq(totalBlocksVerified, 1); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/Executor/Reverting.t.sol b/l1-contracts/test/foundry/unit/concrete/Executor/Reverting.t.sol deleted file mode 100644 index 9419c9dad..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Executor/Reverting.t.sol +++ /dev/null @@ -1,78 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {Vm} from "forge-std/Test.sol"; -import {Utils, L2_SYSTEM_CONTEXT_ADDRESS} from "../Utils/Utils.sol"; - -import {ExecutorTest} from "./_Executor_Shared.t.sol"; - -import {COMMIT_TIMESTAMP_NOT_OLDER} from "contracts/common/Config.sol"; -import {IExecutor, SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; - -contract RevertingTest is ExecutorTest { - function setUp() public { - vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1); - currentTimestamp = 
block.timestamp; - - bytes[] memory correctL2Logs = Utils.createSystemLogs(); - correctL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - bytes memory l2Logs = Utils.encodePacked(correctL2Logs); - newCommitBatchInfo.timestamp = uint64(currentTimestamp); - newCommitBatchInfo.systemLogs = l2Logs; - - IExecutor.CommitBatchInfo[] memory commitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - commitBatchInfoArray[0] = newCommitBatchInfo; - - vm.prank(validator); - vm.recordLogs(); - executor.commitBatches(genesisStoredBatchInfo, commitBatchInfoArray); - Vm.Log[] memory entries = vm.getRecordedLogs(); - - newStoredBatchInfo = IExecutor.StoredBatchInfo({ - batchNumber: 1, - batchHash: entries[0].topics[2], - indexRepeatedStorageChanges: 0, - numberOfLayer1Txs: 0, - priorityOperationsHash: keccak256(""), - l2LogsTreeRoot: 0, - timestamp: currentTimestamp, - commitment: entries[0].topics[3] - }); - - IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); - storedBatchInfoArray[0] = newStoredBatchInfo; - - vm.prank(validator); - - executor.proveBatches(genesisStoredBatchInfo, storedBatchInfoArray, proofInput); - } - - function test_RevertWhen_RevertingMoreBatchesThanAlreadyCommitted() public { - vm.prank(validator); - vm.expectRevert(bytes.concat("v1")); - executor.revertBatches(10); - } - - function test_SuccessfulRevert() public { - uint256 totalBlocksCommittedBefore = getters.getTotalBlocksCommitted(); - assertEq(totalBlocksCommittedBefore, 1, "totalBlocksCommittedBefore"); - - uint256 totalBlocksVerifiedBefore = getters.getTotalBlocksVerified(); - assertEq(totalBlocksVerifiedBefore, 1, "totalBlocksVerifiedBefore"); - - vm.prank(validator); - executor.revertBatches(0); - - uint256 
totalBlocksCommitted = getters.getTotalBlocksCommitted(); - assertEq(totalBlocksCommitted, 0, "totalBlocksCommitted"); - - uint256 totalBlocksVerified = getters.getTotalBlocksVerified(); - assertEq(totalBlocksVerified, 0, "totalBlocksVerified"); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/CheckTransaction.sol b/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/CheckTransaction.sol new file mode 100644 index 000000000..3231a7144 --- /dev/null +++ b/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/CheckTransaction.sol @@ -0,0 +1,89 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {GatewayTransactionFiltererTest} from "./_GatewayTransactionFilterer_Shared.t.sol"; + +import {IGetters} from "contracts/state-transition/chain-interfaces/IGetters.sol"; +import {IL2Bridge} from "contracts/bridge/interfaces/IL2Bridge.sol"; +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; +import {AlreadyWhitelisted, InvalidSelector, NotWhitelisted} from "contracts/common/L1ContractErrors.sol"; + +contract CheckTransactionTest is GatewayTransactionFiltererTest { + function test_TransactionAllowedOnlyFromWhitelistedSenderWhichIsNotAssetRouter() public { + bytes memory txCalladata = abi.encodeCall(IL2Bridge.finalizeDeposit, (bytes32("0x12345"), bytes("0x23456"))); + vm.startPrank(owner); + vm.mockCall( + bridgehub, + abi.encodeWithSelector(IBridgehub.ctmAssetIdToAddress.selector), + abi.encode(address(0)) // Return any address + ); + bool isTxAllowed = transactionFiltererProxy.isTransactionAllowed( + sender, + address(0), + 0, + 0, + txCalladata, + address(0) + ); // Other arguments do not make a difference for the test + + assertEq(isTxAllowed, false, "Transaction should not be allowed"); + + transactionFiltererProxy.grantWhitelist(sender); + isTxAllowed = transactionFiltererProxy.isTransactionAllowed(sender, address(0), 0, 0, txCalladata, address(0)); // Other arguments do not 
make a difference for the test + + assertEq(isTxAllowed, true, "Transaction should be allowed"); + + transactionFiltererProxy.grantWhitelist(assetRouter); + isTxAllowed = transactionFiltererProxy.isTransactionAllowed( + assetRouter, + address(0), + 0, + 0, + txCalladata, + address(0) + ); // Other arguments do not make a difference for the test + + assertEq(isTxAllowed, false, "Transaction should not be allowed"); + + vm.stopPrank(); + } + + function test_TransactionAllowedFromWhitelistedSenderForChainBridging() public { + address stm = address(0x6060606); + bytes memory txCalladata = abi.encodeCall(IL2Bridge.finalizeDeposit, (bytes32("0x12345"), bytes("0x23456"))); + vm.startPrank(owner); + vm.mockCall( + bridgehub, + abi.encodeWithSelector(IBridgehub.ctmAssetIdToAddress.selector), + abi.encode(stm) // Return random address + ); + + transactionFiltererProxy.grantWhitelist(assetRouter); + bool isTxAllowed = transactionFiltererProxy.isTransactionAllowed( + assetRouter, + address(0), + 0, + 0, + txCalladata, + address(0) + ); // Other arguments do not make a difference for the test + + assertEq(isTxAllowed, true, "Transaction should be allowed"); + + vm.stopPrank(); + } + + function test_TransactionFailsWithInvalidSelectorEvenIfTheSenderIsAR() public { + bytes memory txCalladata = abi.encodeCall(IL2Bridge.withdraw, (bytes32("0x12345"), bytes("0x23456"))); + vm.prank(owner); + vm.expectRevert(abi.encodeWithSelector(InvalidSelector.selector, IL2Bridge.withdraw.selector)); + bool isTxAllowed = transactionFiltererProxy.isTransactionAllowed( + assetRouter, + address(0), + 0, + 0, + txCalladata, + address(0) + ); // Other arguments do not make a difference for the test + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/ManageWhitelist.sol b/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/ManageWhitelist.sol new file mode 100644 index 000000000..be176e150 --- /dev/null +++ 
b/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/ManageWhitelist.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {GatewayTransactionFiltererTest} from "./_GatewayTransactionFilterer_Shared.t.sol"; + +import {AlreadyWhitelisted, NotWhitelisted} from "contracts/common/L1ContractErrors.sol"; + +contract ManageWhitelistTest is GatewayTransactionFiltererTest { + function test_GrantingWhitelistToSender() public { + vm.startPrank(owner); + transactionFiltererProxy.grantWhitelist(sender); + + assertEq( + transactionFiltererProxy.whitelistedSenders(sender), + true, + "Whitelisting of sender was not successful" + ); + + vm.expectRevert(abi.encodeWithSelector(AlreadyWhitelisted.selector, sender)); + transactionFiltererProxy.grantWhitelist(sender); + } + + function test_RevokeWhitelistFromSender() public { + vm.startPrank(owner); + vm.expectRevert(abi.encodeWithSelector(NotWhitelisted.selector, sender)); + transactionFiltererProxy.revokeWhitelist(sender); + + transactionFiltererProxy.grantWhitelist(sender); + transactionFiltererProxy.revokeWhitelist(sender); + + assertEq( + transactionFiltererProxy.whitelistedSenders(sender), + false, + "Revoking the sender from whitelist was not successful" + ); + } +} diff --git a/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/_GatewayTransactionFilterer_Shared.t.sol b/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/_GatewayTransactionFilterer_Shared.t.sol new file mode 100644 index 000000000..1b3646ccb --- /dev/null +++ b/l1-contracts/test/foundry/unit/concrete/GatewayTransactionFilterer/_GatewayTransactionFilterer_Shared.t.sol @@ -0,0 +1,38 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.21; + +import {Test} from "forge-std/Test.sol"; + +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; + +import {IBridgehub} from "contracts/bridgehub/IBridgehub.sol"; + 
+import {GatewayTransactionFilterer} from "contracts/transactionFilterer/GatewayTransactionFilterer.sol"; + +contract GatewayTransactionFiltererTest is Test { + GatewayTransactionFilterer internal transactionFiltererProxy; + GatewayTransactionFilterer internal transactionFiltererImplementation; + address internal constant owner = address(0x1010101); + address internal constant admin = address(0x2020202); + address internal constant sender = address(0x3030303); + address internal constant bridgehub = address(0x5050505); + address internal constant assetRouter = address(0x4040404); + + constructor() { + transactionFiltererImplementation = new GatewayTransactionFilterer(IBridgehub(bridgehub), assetRouter); + + transactionFiltererProxy = GatewayTransactionFilterer( + address( + new TransparentUpgradeableProxy( + address(transactionFiltererImplementation), + admin, + abi.encodeCall(GatewayTransactionFilterer.initialize, owner) + ) + ) + ); + } + + // add this to be excluded from coverage report + function test() internal virtual {} +} diff --git a/l1-contracts/test/foundry/unit/concrete/Verifier/VerifierRecursive.t.sol b/l1-contracts/test/foundry/unit/concrete/Verifier/VerifierRecursive.t.sol deleted file mode 100644 index 69bad2303..000000000 --- a/l1-contracts/test/foundry/unit/concrete/Verifier/VerifierRecursive.t.sol +++ /dev/null @@ -1,55 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {VerifierTestTest} from "./Verifier.t.sol"; -import {VerifierRecursiveTest} from "contracts/dev-contracts/test/VerifierRecursiveTest.sol"; - -contract VerifierRecursiveTestTest is VerifierTestTest { - function setUp() public override { - super.setUp(); - - recursiveAggregationInput.push(2257920826825449939414463854743099397427742128922725774525544832270890253504); - recursiveAggregationInput.push(9091218701914748532331969127001446391756173432977615061129552313204917562530); - 
recursiveAggregationInput.push(16188304989094043810949359833767911976672882599560690320245309499206765021563); - recursiveAggregationInput.push(3201093556796962656759050531176732990872300033146738631772984017549903765305); - - verifier = new VerifierRecursiveTest(); - } - - function testMoreThan4WordsRecursiveInput_shouldRevert() public { - uint256[] memory newRecursiveAggregationInput = new uint256[](recursiveAggregationInput.length + 1); - - for (uint256 i = 0; i < recursiveAggregationInput.length; i++) { - newRecursiveAggregationInput[i] = recursiveAggregationInput[i]; - } - newRecursiveAggregationInput[newRecursiveAggregationInput.length - 1] = recursiveAggregationInput[ - recursiveAggregationInput.length - 1 - ]; - - vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(publicInputs, serializedProof, newRecursiveAggregationInput); - } - - function testEmptyRecursiveInput_shouldRevert() public { - uint256[] memory newRecursiveAggregationInput; - - vm.expectRevert(bytes("loadProof: Proof is invalid")); - verifier.verify(publicInputs, serializedProof, newRecursiveAggregationInput); - } - - function testInvalidRecursiveInput_shouldRevert() public { - uint256[] memory newRecursiveAggregationInput = new uint256[](4); - newRecursiveAggregationInput[0] = 1; - newRecursiveAggregationInput[1] = 2; - newRecursiveAggregationInput[2] = 1; - newRecursiveAggregationInput[3] = 2; - - vm.expectRevert(bytes("finalPairing: pairing failure")); - verifier.verify(publicInputs, serializedProof, newRecursiveAggregationInput); - } - - function testVerificationKeyHash() public override { - bytes32 verificationKeyHash = verifier.verificationKeyHash(); - assertEq(verificationKeyHash, 0x88b3ddc4ed85974c7e14297dcad4097169440305c05fdb6441ca8dfd77cd7fa7); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/CreateNewChain.t.sol 
b/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/CreateNewChain.t.sol deleted file mode 100644 index dc66d38b9..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/CreateNewChain.t.sol +++ /dev/null @@ -1,39 +0,0 @@ -// // SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; -import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; - -contract createNewChainTest is StateTransitionManagerTest { - function test_RevertWhen_InitialDiamondCutHashMismatch() public { - Diamond.DiamondCutData memory initialDiamondCutData = getDiamondCutData(sharedBridge); - - vm.expectRevert(bytes("STM: initial cutHash mismatch")); - - createNewChain(initialDiamondCutData); - } - - function test_RevertWhen_CalledNotByBridgehub() public { - Diamond.DiamondCutData memory initialDiamondCutData = getDiamondCutData(diamondInit); - - vm.expectRevert(bytes("STM: only bridgehub")); - - chainContractAddress.createNewChain({ - _chainId: chainId, - _baseToken: baseToken, - _sharedBridge: sharedBridge, - _admin: admin, - _diamondCut: abi.encode(initialDiamondCutData) - }); - } - - function test_SuccessfulCreationOfNewChain() public { - createNewChain(getDiamondCutData(diamondInit)); - - address admin = chainContractAddress.getChainAdmin(chainId); - address newChainAddress = chainContractAddress.getHyperchain(chainId); - - assertEq(newChainAdmin, admin); - assertNotEq(newChainAddress, address(0)); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/FreezeChain.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/FreezeChain.t.sol deleted file mode 100644 index 590d5d4ab..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/FreezeChain.t.sol +++ /dev/null @@ -1,29 +0,0 @@ -// // 
SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; -import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; - -contract freezeChainTest is StateTransitionManagerTest { - function test_FreezingChain() public { - createNewChain(getDiamondCutData(diamondInit)); - - address newChainAddress = chainContractAddress.getHyperchain(chainId); - GettersFacet gettersFacet = GettersFacet(newChainAddress); - bool isChainFrozen = gettersFacet.isDiamondStorageFrozen(); - assertEq(isChainFrozen, false); - - vm.stopPrank(); - vm.startPrank(governor); - - chainContractAddress.freezeChain(block.chainid); - - // Repeated call should revert - vm.expectRevert(bytes.concat("q1")); // storage frozen - chainContractAddress.freezeChain(block.chainid); - - // Call fails as storage is frozen - vm.expectRevert(bytes.concat("q1")); - isChainFrozen = gettersFacet.isDiamondStorageFrozen(); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/RevertBatches.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/RevertBatches.t.sol deleted file mode 100644 index 2113f3467..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/RevertBatches.t.sol +++ /dev/null @@ -1,148 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {Vm} from "forge-std/Test.sol"; - -import {Utils, L2_SYSTEM_CONTEXT_ADDRESS} from "../../Utils/Utils.sol"; -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; - -import {COMMIT_TIMESTAMP_NOT_OLDER, DEFAULT_L2_LOGS_TREE_ROOT_HASH, EMPTY_STRING_KECCAK} from "contracts/common/Config.sol"; -import {IExecutor, SystemLogKey} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; -import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; -import {AdminFacet} 
from "contracts/state-transition/chain-deps/facets/Admin.sol"; -import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; -import {IExecutor} from "contracts/state-transition/chain-interfaces/IExecutor.sol"; - -contract revertBatchesTest is StateTransitionManagerTest { - // Items for logs & commits - uint256 internal currentTimestamp; - IExecutor.CommitBatchInfo internal newCommitBatchInfo; - IExecutor.StoredBatchInfo internal newStoredBatchInfo; - IExecutor.StoredBatchInfo internal genesisStoredBatchInfo; - IExecutor.ProofInput internal proofInput; - - // Facets exposing the diamond - AdminFacet internal adminFacet; - ExecutorFacet internal executorFacet; - GettersFacet internal gettersFacet; - - function test_SuccessfulBatchReverting() public { - createNewChain(getDiamondCutData(diamondInit)); - - address newChainAddress = chainContractAddress.getHyperchain(chainId); - - executorFacet = ExecutorFacet(address(newChainAddress)); - gettersFacet = GettersFacet(address(newChainAddress)); - adminFacet = AdminFacet(address(newChainAddress)); - - // Initial setup for logs & commits - vm.stopPrank(); - vm.startPrank(newChainAdmin); - - genesisStoredBatchInfo = IExecutor.StoredBatchInfo({ - batchNumber: 0, - batchHash: bytes32(uint256(0x01)), - indexRepeatedStorageChanges: 1, - numberOfLayer1Txs: 0, - priorityOperationsHash: EMPTY_STRING_KECCAK, - l2LogsTreeRoot: DEFAULT_L2_LOGS_TREE_ROOT_HASH, - timestamp: 0, - commitment: bytes32(uint256(0x01)) - }); - - adminFacet.setTokenMultiplier(1, 1); - - uint256[] memory recursiveAggregationInput; - uint256[] memory serializedProof; - proofInput = IExecutor.ProofInput(recursiveAggregationInput, serializedProof); - - // foundry's default value is 1 for the block's timestamp, it is expected - // that block.timestamp > COMMIT_TIMESTAMP_NOT_OLDER + 1 - vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1 + 1); - currentTimestamp = block.timestamp; - - bytes memory l2Logs = Utils.encodePacked(Utils.createSystemLogs()); 
- newCommitBatchInfo = IExecutor.CommitBatchInfo({ - batchNumber: 1, - timestamp: uint64(currentTimestamp), - indexRepeatedStorageChanges: 1, - newStateRoot: Utils.randomBytes32("newStateRoot"), - numberOfLayer1Txs: 0, - priorityOperationsHash: keccak256(""), - bootloaderHeapInitialContentsHash: Utils.randomBytes32("bootloaderHeapInitialContentsHash"), - eventsQueueStateHash: Utils.randomBytes32("eventsQueueStateHash"), - systemLogs: l2Logs, - pubdataCommitments: "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" - }); - - // Commit & prove batches - vm.warp(COMMIT_TIMESTAMP_NOT_OLDER + 1); - currentTimestamp = block.timestamp; - - bytes32 expectedSystemContractUpgradeTxHash = gettersFacet.getL2SystemContractsUpgradeTxHash(); - bytes[] memory correctL2Logs = Utils.createSystemLogsWithUpgradeTransaction( - expectedSystemContractUpgradeTxHash - ); - - correctL2Logs[uint256(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY), - Utils.packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) - ); - - correctL2Logs[uint256(uint256(SystemLogKey.PREV_BATCH_HASH_KEY))] = Utils.constructL2Log( - true, - L2_SYSTEM_CONTEXT_ADDRESS, - uint256(SystemLogKey.PREV_BATCH_HASH_KEY), - bytes32(uint256(0x01)) - ); - - l2Logs = Utils.encodePacked(correctL2Logs); - newCommitBatchInfo.timestamp = uint64(currentTimestamp); - newCommitBatchInfo.systemLogs = l2Logs; - - IExecutor.CommitBatchInfo[] memory commitBatchInfoArray = new IExecutor.CommitBatchInfo[](1); - commitBatchInfoArray[0] = newCommitBatchInfo; - - vm.stopPrank(); - vm.startPrank(validator); - vm.recordLogs(); - executorFacet.commitBatches(genesisStoredBatchInfo, 
commitBatchInfoArray); - Vm.Log[] memory entries = vm.getRecordedLogs(); - - newStoredBatchInfo = IExecutor.StoredBatchInfo({ - batchNumber: 1, - batchHash: entries[0].topics[2], - indexRepeatedStorageChanges: 1, - numberOfLayer1Txs: 0, - priorityOperationsHash: keccak256(""), - l2LogsTreeRoot: 0, - timestamp: currentTimestamp, - commitment: entries[0].topics[3] - }); - - IExecutor.StoredBatchInfo[] memory storedBatchInfoArray = new IExecutor.StoredBatchInfo[](1); - storedBatchInfoArray[0] = newStoredBatchInfo; - - executorFacet.proveBatches(genesisStoredBatchInfo, storedBatchInfoArray, proofInput); - - // Test batch revert triggered from STM - vm.stopPrank(); - vm.startPrank(governor); - - uint256 totalBlocksCommittedBefore = gettersFacet.getTotalBlocksCommitted(); - assertEq(totalBlocksCommittedBefore, 1, "totalBlocksCommittedBefore"); - - uint256 totalBlocksVerifiedBefore = gettersFacet.getTotalBlocksVerified(); - assertEq(totalBlocksVerifiedBefore, 1, "totalBlocksVerifiedBefore"); - - chainContractAddress.revertBatches(chainId, 0); - - uint256 totalBlocksCommitted = gettersFacet.getTotalBlocksCommitted(); - assertEq(totalBlocksCommitted, 0, "totalBlocksCommitted"); - - uint256 totalBlocksVerified = gettersFacet.getTotalBlocksVerified(); - assertEq(totalBlocksVerified, 0, "totalBlocksVerified"); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetValidatorTimelock.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetValidatorTimelock.t.sol deleted file mode 100644 index d290a8767..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/SetValidatorTimelock.t.sol +++ /dev/null @@ -1,23 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; - -contract setValidatorTimelockTest is StateTransitionManagerTest { - function 
test_SettingValidatorTimelock() public { - assertEq( - chainContractAddress.validatorTimelock(), - validator, - "Initial validator timelock address is not correct" - ); - - address newValidatorTimelock = address(0x0000000000000000000000000000000000004235); - chainContractAddress.setValidatorTimelock(newValidatorTimelock); - - assertEq( - chainContractAddress.validatorTimelock(), - newValidatorTimelock, - "Validator timelock update was not successful" - ); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/StateTransitionOwnerZero.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/StateTransitionOwnerZero.t.sol deleted file mode 100644 index 8fae0aa1e..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/StateTransitionOwnerZero.t.sol +++ /dev/null @@ -1,33 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; -import {StateTransitionManagerTest} from "./_StateTransitionManager_Shared.t.sol"; -import {StateTransitionManager} from "contracts/state-transition/StateTransitionManager.sol"; -import {StateTransitionManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IStateTransitionManager.sol"; - -contract initializingSTMOwnerZeroTest is StateTransitionManagerTest { - function test_InitializingSTMWithGovernorZeroShouldRevert() public { - ChainCreationParams memory chainCreationParams = ChainCreationParams({ - genesisUpgrade: address(genesisUpgradeContract), - genesisBatchHash: bytes32(uint256(0x01)), - genesisIndexRepeatedStorageChanges: 1, - genesisBatchCommitment: bytes32(uint256(0x01)), - diamondCut: getDiamondCutData(address(diamondInit)) - }); - - StateTransitionManagerInitializeData memory stmInitializeDataNoOwner = StateTransitionManagerInitializeData({ - owner: address(0), - 
validatorTimelock: validator, - chainCreationParams: chainCreationParams, - protocolVersion: 0 - }); - - vm.expectRevert(bytes("STM: owner zero")); - new TransparentUpgradeableProxy( - address(stateTransitionManager), - admin, - abi.encodeCall(StateTransitionManager.initialize, stmInitializeDataNoOwner) - ); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/_StateTransitionManager_Shared.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/_StateTransitionManager_Shared.t.sol deleted file mode 100644 index 2cdbd8e00..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/StateTransitionManager/_StateTransitionManager_Shared.t.sol +++ /dev/null @@ -1,144 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.24; - -import {Test} from "forge-std/Test.sol"; - -import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; - -import {Utils} from "foundry-test/unit/concrete/Utils/Utils.sol"; -import {UtilsFacet} from "foundry-test/unit/concrete/Utils/UtilsFacet.sol"; -import {AdminFacet} from "contracts/state-transition/chain-deps/facets/Admin.sol"; -import {ExecutorFacet} from "contracts/state-transition/chain-deps/facets/Executor.sol"; -import {GettersFacet} from "contracts/state-transition/chain-deps/facets/Getters.sol"; -import {Diamond} from "contracts/state-transition/libraries/Diamond.sol"; -import {DiamondInit} from "contracts/state-transition/chain-deps/DiamondInit.sol"; -import {GenesisUpgrade} from "contracts/upgrades/GenesisUpgrade.sol"; -import {InitializeDataNewChain} from "contracts/state-transition/chain-interfaces/IDiamondInit.sol"; -import {StateTransitionManager} from "contracts/state-transition/StateTransitionManager.sol"; -import {StateTransitionManagerInitializeData, ChainCreationParams} from "contracts/state-transition/IStateTransitionManager.sol"; -import {TestnetVerifier} from 
"contracts/state-transition/TestnetVerifier.sol"; - -contract StateTransitionManagerTest is Test { - StateTransitionManager internal stateTransitionManager; - StateTransitionManager internal chainContractAddress; - GenesisUpgrade internal genesisUpgradeContract; - address internal bridgehub; - address internal diamondInit; - address internal constant governor = address(0x1010101); - address internal constant admin = address(0x2020202); - address internal constant baseToken = address(0x3030303); - address internal constant sharedBridge = address(0x4040404); - address internal constant validator = address(0x5050505); - address internal newChainAdmin; - uint256 chainId = block.chainid; - address internal testnetVerifier = address(new TestnetVerifier()); - - Diamond.FacetCut[] internal facetCuts; - - function setUp() public { - bridgehub = makeAddr("bridgehub"); - newChainAdmin = makeAddr("chainadmin"); - - vm.startPrank(bridgehub); - stateTransitionManager = new StateTransitionManager(bridgehub, type(uint256).max); - diamondInit = address(new DiamondInit()); - genesisUpgradeContract = new GenesisUpgrade(); - - facetCuts.push( - Diamond.FacetCut({ - facet: address(new UtilsFacet()), - action: Diamond.Action.Add, - isFreezable: true, - selectors: Utils.getUtilsFacetSelectors() - }) - ); - facetCuts.push( - Diamond.FacetCut({ - facet: address(new AdminFacet()), - action: Diamond.Action.Add, - isFreezable: true, - selectors: Utils.getAdminSelectors() - }) - ); - facetCuts.push( - Diamond.FacetCut({ - facet: address(new ExecutorFacet()), - action: Diamond.Action.Add, - isFreezable: true, - selectors: Utils.getExecutorSelectors() - }) - ); - facetCuts.push( - Diamond.FacetCut({ - facet: address(new GettersFacet()), - action: Diamond.Action.Add, - isFreezable: true, - selectors: Utils.getGettersSelectors() - }) - ); - - ChainCreationParams memory chainCreationParams = ChainCreationParams({ - genesisUpgrade: address(genesisUpgradeContract), - genesisBatchHash: 
bytes32(uint256(0x01)), - genesisIndexRepeatedStorageChanges: 0x01, - genesisBatchCommitment: bytes32(uint256(0x01)), - diamondCut: getDiamondCutData(address(diamondInit)) - }); - - StateTransitionManagerInitializeData memory stmInitializeDataNoGovernor = StateTransitionManagerInitializeData({ - owner: address(0), - validatorTimelock: validator, - chainCreationParams: chainCreationParams, - protocolVersion: 0 - }); - - vm.expectRevert(bytes.concat("STM: owner zero")); - new TransparentUpgradeableProxy( - address(stateTransitionManager), - admin, - abi.encodeCall(StateTransitionManager.initialize, stmInitializeDataNoGovernor) - ); - - StateTransitionManagerInitializeData memory stmInitializeData = StateTransitionManagerInitializeData({ - owner: governor, - validatorTimelock: validator, - chainCreationParams: chainCreationParams, - protocolVersion: 0 - }); - - TransparentUpgradeableProxy transparentUpgradeableProxy = new TransparentUpgradeableProxy( - address(stateTransitionManager), - admin, - abi.encodeCall(StateTransitionManager.initialize, stmInitializeData) - ); - chainContractAddress = StateTransitionManager(address(transparentUpgradeableProxy)); - - vm.stopPrank(); - vm.startPrank(governor); - } - - function getDiamondCutData(address _diamondInit) internal returns (Diamond.DiamondCutData memory) { - InitializeDataNewChain memory initializeData = Utils.makeInitializeDataForNewChain(testnetVerifier); - - bytes memory initCalldata = abi.encode(initializeData); - - return Diamond.DiamondCutData({facetCuts: facetCuts, initAddress: _diamondInit, initCalldata: initCalldata}); - } - - function createNewChain(Diamond.DiamondCutData memory _diamondCut) internal { - vm.stopPrank(); - vm.startPrank(bridgehub); - - chainContractAddress.createNewChain({ - _chainId: chainId, - _baseToken: baseToken, - _sharedBridge: sharedBridge, - _admin: newChainAdmin, - _diamondCut: abi.encode(_diamondCut) - }); - } - - // add this to be excluded from coverage report - function test() 
internal virtual {} -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/FreezeDiamond.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/FreezeDiamond.t.sol deleted file mode 100644 index 8e79f4fc3..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/FreezeDiamond.t.sol +++ /dev/null @@ -1,19 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; - -contract FreezeDiamondTest is AdminTest { - event Freeze(); - - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); - - vm.expectRevert(ERROR_ONLY_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonStateTransitionManager); - adminFacet.freezeDiamond(); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/UnfreezeDiamond.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/UnfreezeDiamond.t.sol deleted file mode 100644 index b7f1fa124..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Admin/UnfreezeDiamond.t.sol +++ /dev/null @@ -1,30 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {AdminTest} from "./_Admin_Shared.t.sol"; -import {ERROR_ONLY_STATE_TRANSITION_MANAGER} from "../Base/_Base_Shared.t.sol"; - -contract UnfreezeDiamondTest is AdminTest { - event Unfreeze(); - - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); - - vm.expectRevert(ERROR_ONLY_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonStateTransitionManager); - adminFacet.unfreezeDiamond(); - } - - function test_revertWhen_diamondIsNotFrozen() public 
{ - address admin = utilsFacet.util_getStateTransitionManager(); - - utilsFacet.util_setIsFrozen(false); - - vm.expectRevert(bytes.concat("a7")); - - vm.startPrank(admin); - adminFacet.unfreezeDiamond(); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernorOrStateTransitionManager.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernorOrStateTransitionManager.t.sol deleted file mode 100644 index da2d6fccf..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyGovernorOrStateTransitionManager.t.sol +++ /dev/null @@ -1,39 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {ZkSyncHyperchainBaseTest, ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER} from "./_Base_Shared.t.sol"; - -contract OnlyAdminOrStateTransitionManagerTest is ZkSyncHyperchainBaseTest { - function test_revertWhen_calledByNonAdmin() public { - address nonAdmin = makeAddr("nonAdmin"); - - vm.expectRevert(ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonAdmin); - testBaseFacet.functionWithOnlyAdminOrStateTransitionManagerModifier(); - } - - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); - - vm.expectRevert(ERROR_ONLY_ADMIN_OR_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonStateTransitionManager); - testBaseFacet.functionWithOnlyAdminOrStateTransitionManagerModifier(); - } - - function test_successfulCallWhenCalledByAdmin() public { - address admin = utilsFacet.util_getAdmin(); - - vm.startPrank(admin); - testBaseFacet.functionWithOnlyAdminOrStateTransitionManagerModifier(); - } - - function test_successfulCallWhenCalledByStateTransitionManager() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); - - vm.startPrank(stateTransitionManager); - 
testBaseFacet.functionWithOnlyAdminOrStateTransitionManagerModifier(); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyStateTransitionManager.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyStateTransitionManager.t.sol deleted file mode 100644 index f6aafb661..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Base/OnlyStateTransitionManager.t.sol +++ /dev/null @@ -1,23 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {ZkSyncHyperchainBaseTest, ERROR_ONLY_STATE_TRANSITION_MANAGER} from "./_Base_Shared.t.sol"; - -contract OnlyStateTransitionManagerTest is ZkSyncHyperchainBaseTest { - function test_revertWhen_calledByNonStateTransitionManager() public { - address nonStateTransitionManager = makeAddr("nonStateTransitionManager"); - - vm.expectRevert(ERROR_ONLY_STATE_TRANSITION_MANAGER); - - vm.startPrank(nonStateTransitionManager); - testBaseFacet.functionWithOnlyStateTransitionManagerModifier(); - } - - function test_successfulCall() public { - address stateTransitionManager = utilsFacet.util_getStateTransitionManager(); - - vm.startPrank(stateTransitionManager); - testBaseFacet.functionWithOnlyStateTransitionManagerModifier(); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseTokenBridge.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseTokenBridge.t.sol deleted file mode 100644 index db32ca6bd..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetBaseTokenBridge.t.sol +++ /dev/null @@ -1,16 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {GettersFacetTest} from "./_Getters_Shared.t.sol"; - -contract GetBaseTokenBridgeTest is GettersFacetTest { - function test() public { - address expected = 
makeAddr("baseTokenBride"); - gettersFacetWrapper.util_setBaseTokenBridge(expected); - - address received = gettersFacet.getBaseTokenBridge(); - - assertEq(expected, received, "BaseTokenBridge address is incorrect"); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetStateTransitionManager.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetStateTransitionManager.t.sol deleted file mode 100644 index 9b3038f97..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/GetStateTransitionManager.t.sol +++ /dev/null @@ -1,16 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {GettersFacetTest} from "./_Getters_Shared.t.sol"; - -contract GetStateTransitionManagerTest is GettersFacetTest { - function test() public { - address expected = makeAddr("stateTransitionManager"); - gettersFacetWrapper.util_setStateTransitionManager(expected); - - address received = gettersFacet.getStateTransitionManager(); - - assertEq(expected, received, "StateTransitionManager address is incorrect"); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/PriorityQueueFrontOperation.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/PriorityQueueFrontOperation.t.sol deleted file mode 100644 index d17577afc..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/chain-deps/facets/Getters/PriorityQueueFrontOperation.t.sol +++ /dev/null @@ -1,29 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import {GettersFacetTest} from "./_Getters_Shared.t.sol"; -import {PriorityOperation} from "contracts/state-transition/libraries/PriorityQueue.sol"; - -contract GetPriorityQueueFrontOperationTest is GettersFacetTest { - function test_revertWhen_queueIsEmpty() public { - vm.expectRevert(bytes.concat("D")); - 
gettersFacet.priorityQueueFrontOperation(); - } - - function test() public { - PriorityOperation memory expected = PriorityOperation({ - canonicalTxHash: bytes32(uint256(1)), - expirationTimestamp: uint64(2), - layer2Tip: uint192(3) - }); - - gettersFacetWrapper.util_setPriorityQueueFrontOperation(expected); - - PriorityOperation memory received = gettersFacet.priorityQueueFrontOperation(); - - bytes32 expectedHash = keccak256(abi.encode(expected)); - bytes32 receivedHash = keccak256(abi.encode(received)); - assertEq(expectedHash, receivedHash, "Priority queue front operation is incorrect"); - } -} diff --git a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/Merkle/Merkle.t.sol b/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/Merkle/Merkle.t.sol deleted file mode 100644 index 492d489c2..000000000 --- a/l1-contracts/test/foundry/unit/concrete/state-transition/libraries/Merkle/Merkle.t.sol +++ /dev/null @@ -1,66 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import {Test} from "forge-std/Test.sol"; -import {MerkleTest} from "contracts/dev-contracts/test/MerkleTest.sol"; -import {MerkleTreeNoSort} from "./MerkleTreeNoSort.sol"; - -contract MerkleTestTest is Test { - MerkleTreeNoSort merkleTree; - MerkleTest merkleTest; - bytes32[] elements; - bytes32 root; - - function setUp() public { - merkleTree = new MerkleTreeNoSort(); - merkleTest = new MerkleTest(); - - for (uint256 i = 0; i < 65; i++) { - elements.push(keccak256(abi.encodePacked(i))); - } - - root = merkleTree.getRoot(elements); - } - - function testElements(uint256 i) public { - vm.assume(i < elements.length); - bytes32 leaf = elements[i]; - bytes32[] memory proof = merkleTree.getProof(elements, i); - - bytes32 rootFromContract = merkleTest.calculateRoot(proof, i, leaf); - - assertEq(rootFromContract, root); - } - - function testFirstElement() public { - testElements(0); - } - - function testLastElement() public { - testElements(elements.length - 
1); - } - - function testEmptyProof_shouldRevert() public { - bytes32 leaf = elements[0]; - bytes32[] memory proof; - - vm.expectRevert(bytes("xc")); - merkleTest.calculateRoot(proof, 0, leaf); - } - - function testLeafIndexTooBig_shouldRevert() public { - bytes32 leaf = elements[0]; - bytes32[] memory proof = merkleTree.getProof(elements, 0); - - vm.expectRevert(bytes("px")); - merkleTest.calculateRoot(proof, 2 ** 255, leaf); - } - - function testProofLengthTooLarge_shouldRevert() public { - bytes32 leaf = elements[0]; - bytes32[] memory proof = new bytes32[](256); - - vm.expectRevert(bytes("bt")); - merkleTest.calculateRoot(proof, 0, leaf); - } -} diff --git a/l1-contracts/test/test_config/constant/hardhat.json b/l1-contracts/test/test_config/constant/hardhat.json index 0e63431f0..7e18d5adf 100644 --- a/l1-contracts/test/test_config/constant/hardhat.json +++ b/l1-contracts/test/test_config/constant/hardhat.json @@ -3,96 +3,96 @@ "name": "DAI", "symbol": "DAI", "decimals": 18, - "address": "0xD6E49dd4fb0CA1549566869725d1820aDEb92Ae9" + "address": "0x2733174391e451C1708050dE4442f2AaF197759C" }, { "name": "wBTC", "symbol": "wBTC", "decimals": 8, - "address": "0xcee1f75F30B6908286Cd003C4228A5D9a2851FA4" + "address": "0x5195459c6a59dA8f5bED6a9E7692d5b5b40A2928" }, { "name": "BAT", "symbol": "BAT", "decimals": 18, - "address": "0x0Bc76A4EfE0748f1697F237fB100741ea6Ceda2d" + "address": "0x18fb101C1f3ab450498fD0D4400e4D8a3c1B9F6c" }, { "name": "GNT", "symbol": "GNT", "decimals": 18, - "address": "0x51ae50BcCEE10ac5BEFFA1E4a64106a5f83bc3F8" + "address": "0x20bABCb488aad652e713fB70BACCD0c8e72de4EF" }, { "name": "MLTT", "symbol": "MLTT", "decimals": 18, - "address": "0xa9c7fEEf8586E17D93A05f873BA65f28f48ED259" + "address": "0xFF9d5a057d09802c371582D1166df728D3b019A4" }, { "name": "DAIK", "symbol": "DAIK", "decimals": 18, - "address": "0x99Efb27598804Aa408A1066550e9d01c45f21b05" + "address": "0xAC5d1395Dd3bA956bB8Ba0e0E8ffe247404fd9c5" }, { "name": "wBTCK", "symbol": "wBTCK", 
"decimals": 8, - "address": "0x4B701928Da6B3e72775b462A15b8b76ba2d16BbD" + "address": "0x9614c0F8e657eAb74e95B87cA819C6ae1F9d5fe1" }, { "name": "BATK", "symbol": "BATS", "decimals": 18, - "address": "0xf7B03c921dfefB4286b13075BA0335099708368D" + "address": "0x175F95E3c6a30c3D4DDBA32f82427C5d371f67B8" }, { "name": "GNTK", "symbol": "GNTS", "decimals": 18, - "address": "0xc0581Ee28c519533B06cc0aAC1ace98cF63C817b" + "address": "0x205725BE39c54574e64771C3c71c44829E3031dC" }, { "name": "MLTTK", "symbol": "MLTTS", "decimals": 18, - "address": "0xeB6394F2E8DA607b94dBa2Cf345A965d6D9b3aCD" + "address": "0xADDb1960aCAAC7db90E3d20b9621D8b8C0b97405" }, { "name": "DAIL", "symbol": "DAIL", "decimals": 18, - "address": "0x4311643C5eD7cD0813B4E3Ff5428de71c7d7b8bB" + "address": "0x5F0A3258CF075F828a01bEAf63cCea32159B4EcD" }, { "name": "wBTCL", "symbol": "wBTCP", "decimals": 8, - "address": "0x6b3fbfC9Bb89Ab5F11BE782a1f67c1615c2A5fc3" + "address": "0x09405DfB0C61959daA219e9E9907223e7F091587" }, { "name": "BATL", "symbol": "BATW", "decimals": 18, - "address": "0xE003698b7831829843B69D3fB4f9a3133d97b257" + "address": "0xA318F029bc204EaF55A6f480f9Dc6Ef0C235d2D6" }, { "name": "GNTL", "symbol": "GNTW", "decimals": 18, - "address": "0x2417626170675Ccf6022d9db1eFC8f3c59836368" + "address": "0x9910Ed28A31C2b4EE8A7C1F8559Cf1a874e374F2" }, { "name": "MLTTL", "symbol": "MLTTW", "decimals": 18, - "address": "0x28106C39BE5E51C31D9a289313361D86C9bb7C8E" + "address": "0x3bC2c8eF2f110750bfa9aA89D30F3D66c2a03bb9" }, { "name": "Wrapped Ether", "symbol": "WETH", "decimals": 18, - "address": "0x51E83b811930bb4a3aAb3494894ec237Cb6cEc49" + "address": "0x6930c5c3421a666d6642FafBe750B1D0B42197c6" } ] diff --git a/l1-contracts/test/unit_tests/custom_base_token.spec.ts b/l1-contracts/test/unit_tests/custom_base_token.spec.ts index b0e9733a3..2fc87d199 100644 --- a/l1-contracts/test/unit_tests/custom_base_token.spec.ts +++ b/l1-contracts/test/unit_tests/custom_base_token.spec.ts @@ -1,18 +1,19 @@ import { 
expect } from "chai"; import * as hardhat from "hardhat"; import { ethers, Wallet } from "ethers"; -import { Interface } from "ethers/lib/utils"; import type { TestnetERC20Token } from "../../typechain"; import { TestnetERC20TokenFactory } from "../../typechain"; import type { IBridgehub } from "../../typechain/IBridgehub"; import { IBridgehubFactory } from "../../typechain/IBridgehubFactory"; -import type { IL1SharedBridge } from "../../typechain/IL1SharedBridge"; -import { IL1SharedBridgeFactory } from "../../typechain/IL1SharedBridgeFactory"; +import type { IL1AssetRouter } from "../../typechain/IL1AssetRouter"; +import { IL1AssetRouterFactory } from "../../typechain/IL1AssetRouterFactory"; +import type { IL1NativeTokenVault } from "../../typechain/IL1NativeTokenVault"; +import { IL1NativeTokenVaultFactory } from "../../typechain/IL1NativeTokenVaultFactory"; import { getTokens } from "../../src.ts/deploy-token"; import type { Deployer } from "../../src.ts/deploy"; -import { ADDRESS_ONE, ethTestConfig } from "../../src.ts/utils"; +import { ethTestConfig } from "../../src.ts/utils"; import { initialTestnetDeploymentProcess } from "../../src.ts/deploy-test-process"; import { getCallRevertReason, REQUIRED_L2_GAS_PRICE_PER_PUBDATA } from "./utils"; @@ -22,8 +23,9 @@ describe("Custom base token chain and bridge tests", () => { let randomSigner: ethers.Signer; let deployWallet: Wallet; let deployer: Deployer; - let l1SharedBridge: IL1SharedBridge; + let l1SharedBridge: IL1AssetRouter; let bridgehub: IBridgehub; + let nativeTokenVault: IL1NativeTokenVault; let baseToken: TestnetERC20Token; let baseTokenAddress: string; let altTokenAddress: string; @@ -61,27 +63,20 @@ describe("Custom base token chain and bridge tests", () => { altToken = TestnetERC20TokenFactory.connect(altTokenAddress, owner); // prepare the bridge - l1SharedBridge = IL1SharedBridgeFactory.connect(deployer.addresses.Bridges.SharedBridgeProxy, deployWallet); + l1SharedBridge = 
IL1AssetRouterFactory.connect(deployer.addresses.Bridges.SharedBridgeProxy, deployWallet); + + nativeTokenVault = IL1NativeTokenVaultFactory.connect( + deployer.addresses.Bridges.NativeTokenVaultProxy, + deployWallet + ); }); it("Should have correct base token", async () => { // we should still be able to deploy the erc20 bridge - const baseTokenAddressInBridgehub = await bridgehub.baseToken(chainId); + const baseTokenAddressInBridgehub = await bridgehub.baseToken(deployer.chainId); expect(baseTokenAddress).equal(baseTokenAddressInBridgehub); }); - it("Check should initialize through governance", async () => { - const l1SharedBridgeInterface = new Interface(hardhat.artifacts.readArtifactSync("L1SharedBridge").abi); - const upgradeCall = l1SharedBridgeInterface.encodeFunctionData("initializeChainGovernance(uint256,address)", [ - chainId, - ADDRESS_ONE, - ]); - - const txHash = await deployer.executeUpgrade(l1SharedBridge.address, 0, upgradeCall); - - expect(txHash).not.equal(ethers.constants.HashZero); - }); - it("Should not allow direct legacy deposits", async () => { const revertReason = await getCallRevertReason( l1SharedBridge @@ -97,7 +92,7 @@ describe("Custom base token chain and bridge tests", () => { ) ); - expect(revertReason).equal("ShB not legacy bridge"); + expect(revertReason).contains("Unauthorized"); }); it("Should deposit base token successfully direct via bridgehub", async () => { @@ -119,6 +114,7 @@ describe("Custom base token chain and bridge tests", () => { }); it("Should deposit alternative token successfully twoBridges method", async () => { + nativeTokenVault.registerToken(altTokenAddress); const altTokenAmount = ethers.utils.parseUnits("800", 18); const baseTokenAmount = ethers.utils.parseUnits("800", 18); @@ -137,23 +133,28 @@ describe("Custom base token chain and bridge tests", () => { secondBridgeAddress: l1SharedBridge.address, secondBridgeValue: 0, secondBridgeCalldata: ethers.utils.defaultAbiCoder.encode( - ["address", "uint256", 
"address"], - [altTokenAddress, altTokenAmount, await randomSigner.getAddress()] + ["bytes32", "bytes", "address"], + [ + ethers.utils.hexZeroPad(altTokenAddress, 32), + new ethers.utils.AbiCoder().encode(["uint256"], [altTokenAmount]), + await randomSigner.getAddress(), + ] ), }); }); it("Should revert on finalizing a withdrawal with wrong message length", async () => { + const mailboxFunctionSignature = "0x6c0960f9"; const revertReason = await getCallRevertReason( - l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 0, 0, 0, "0x", []) + l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 0, 0, 0, mailboxFunctionSignature, []) ); - expect(revertReason).equal("ShB wrong msg len"); + expect(revertReason).contains("L2WithdrawalMessageWrongLength"); }); it("Should revert on finalizing a withdrawal with wrong function selector", async () => { const revertReason = await getCallRevertReason( l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 0, 0, 0, ethers.utils.randomBytes(96), []) ); - expect(revertReason).equal("ShB Incorrect message function selector"); + expect(revertReason).contains("InvalidSelector"); }); }); diff --git a/l1-contracts/test/unit_tests/gateway.spec.ts b/l1-contracts/test/unit_tests/gateway.spec.ts new file mode 100644 index 000000000..37460e02a --- /dev/null +++ b/l1-contracts/test/unit_tests/gateway.spec.ts @@ -0,0 +1,184 @@ +import { expect } from "chai"; +import * as ethers from "ethers"; +import { Wallet } from "ethers"; +import * as hardhat from "hardhat"; + +import type { Bridgehub } from "../../typechain"; +import { BridgehubFactory } from "../../typechain"; + +import { + initialTestnetDeploymentProcess, + defaultDeployerForTests, + registerZKChainWithBridgeRegistration, +} from "../../src.ts/deploy-test-process"; +import { + ethTestConfig, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + priorityTxMaxGasLimit, + L2_BRIDGEHUB_ADDRESS, +} from "../../src.ts/utils"; +import { SYSTEM_CONFIG } from 
"../../scripts/utils"; + +import type { Deployer } from "../../src.ts/deploy"; + +describe("Gateway", function () { + let bridgehub: Bridgehub; + // let stateTransition: ChainTypeManager; + let owner: ethers.Signer; + let migratingDeployer: Deployer; + let gatewayDeployer: Deployer; + // const MAX_CODE_LEN_WORDS = (1 << 16) - 1; + // const MAX_CODE_LEN_BYTES = MAX_CODE_LEN_WORDS * 32; + // let forwarder: Forwarder; + let chainId = process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID || 270; + const mintChainId = 11; + + before(async () => { + [owner] = await hardhat.ethers.getSigners(); + + const deployWallet = Wallet.fromMnemonic(ethTestConfig.test_mnemonic3, "m/44'/60'/0'/0/1").connect(owner.provider); + const ownerAddress = await deployWallet.getAddress(); + + const gasPrice = await owner.provider.getGasPrice(); + + const tx = { + from: await owner.getAddress(), + to: deployWallet.address, + value: ethers.utils.parseEther("1000"), + nonce: owner.getTransactionCount(), + gasLimit: 100000, + gasPrice: gasPrice, + }; + + await owner.sendTransaction(tx); + + migratingDeployer = await initialTestnetDeploymentProcess(deployWallet, ownerAddress, gasPrice, []); + // We will use the chain admin as the admin to be closer to the production environment + await migratingDeployer.transferAdminFromDeployerToChainAdmin(); + + chainId = migratingDeployer.chainId; + + bridgehub = BridgehubFactory.connect(migratingDeployer.addresses.Bridgehub.BridgehubProxy, deployWallet); + + gatewayDeployer = await defaultDeployerForTests(deployWallet, ownerAddress); + gatewayDeployer.chainId = 10; + await registerZKChainWithBridgeRegistration( + gatewayDeployer, + false, + [], + gasPrice, + undefined, + gatewayDeployer.chainId.toString() + ); + + // For tests, the chainId is 9 + migratingDeployer.chainId = 9; + }); + + it("Check register synclayer", async () => { + await gatewayDeployer.registerSettlementLayer(); + }); + + it("Check start move chain to synclayer", async () => { + const gasPrice = await 
owner.provider.getGasPrice(); + await migratingDeployer.moveChainToGateway(gatewayDeployer.chainId.toString(), gasPrice); + expect(await bridgehub.settlementLayer(migratingDeployer.chainId)).to.equal(gatewayDeployer.chainId); + }); + + it("Check l2 registration", async () => { + const ctm = migratingDeployer.chainTypeManagerContract(migratingDeployer.deployWallet); + const gasPrice = await migratingDeployer.deployWallet.provider.getGasPrice(); + const value = ( + await bridgehub.l2TransactionBaseCost(chainId, gasPrice, priorityTxMaxGasLimit, REQUIRED_L2_GAS_PRICE_PER_PUBDATA) + ).mul(10); + + const ctmDeploymentTracker = migratingDeployer.ctmDeploymentTracker(migratingDeployer.deployWallet); + const assetRouter = migratingDeployer.defaultSharedBridge(migratingDeployer.deployWallet); + const assetId = await bridgehub.ctmAssetIdFromChainId(chainId); + + await migratingDeployer.executeUpgrade( + bridgehub.address, + value, + bridgehub.interface.encodeFunctionData("requestL2TransactionTwoBridges", [ + { + chainId, + mintValue: value, + l2Value: 0, + l2GasLimit: priorityTxMaxGasLimit, + l2GasPerPubdataByteLimit: SYSTEM_CONFIG.requiredL2GasPricePerPubdata, + refundRecipient: migratingDeployer.deployWallet.address, + secondBridgeAddress: assetRouter.address, + secondBridgeValue: 0, + secondBridgeCalldata: + "0x02" + + ethers.utils.defaultAbiCoder.encode(["bytes32", "address"], [assetId, L2_BRIDGEHUB_ADDRESS]).slice(2), + }, + ]) + ); + await migratingDeployer.executeUpgrade( + bridgehub.address, + value, + bridgehub.interface.encodeFunctionData("requestL2TransactionTwoBridges", [ + { + chainId, + mintValue: value, + l2Value: 0, + l2GasLimit: priorityTxMaxGasLimit, + l2GasPerPubdataByteLimit: SYSTEM_CONFIG.requiredL2GasPricePerPubdata, + refundRecipient: migratingDeployer.deployWallet.address, + secondBridgeAddress: ctmDeploymentTracker.address, + secondBridgeValue: 0, + secondBridgeCalldata: + "0x01" + ethers.utils.defaultAbiCoder.encode(["address", "address"], 
[ctm.address, ctm.address]).slice(2), + }, + ]) + ); + // console.log("CTM asset registered in L2 Bridgehub on SL"); + }); + + it("Check start message to L3 on L1", async () => { + const amount = ethers.utils.parseEther("2"); + await bridgehub.requestL2TransactionDirect( + { + chainId: migratingDeployer.chainId, + mintValue: amount, + l2Contract: ethers.constants.AddressZero, + l2Value: 0, + l2Calldata: "0x", + l2GasLimit: priorityTxMaxGasLimit, + l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + factoryDeps: [], + refundRecipient: ethers.constants.AddressZero, + }, + { value: amount } + ); + }); + + it("Check forward message to L3 on SL", async () => { + const tx = { + txType: 255, + from: ethers.constants.AddressZero, + to: ethers.constants.AddressZero, + gasLimit: priorityTxMaxGasLimit, + gasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + maxFeePerGas: 1, + maxPriorityFeePerGas: 0, + paymaster: 0, + // Note, that the priority operation id is used as "nonce" for L1->L2 transactions + nonce: 0, + value: 0, + reserved: [0 as ethers.BigNumberish, 0, 0, 0] as [ + ethers.BigNumberish, + ethers.BigNumberish, + ethers.BigNumberish, + ethers.BigNumberish, + ], + data: "0x", + signature: ethers.constants.HashZero, + factoryDeps: [], + paymasterInput: "0x", + reservedDynamic: "0x", + }; + bridgehub.forwardTransactionOnGateway(mintChainId, tx, [], ethers.constants.HashZero, 0); + }); +}); diff --git a/l1-contracts/test/unit_tests/governance_test.spec.ts b/l1-contracts/test/unit_tests/governance_test.spec.ts index 4ab11f266..e689def8d 100644 --- a/l1-contracts/test/unit_tests/governance_test.spec.ts +++ b/l1-contracts/test/unit_tests/governance_test.spec.ts @@ -13,17 +13,18 @@ describe("Admin facet tests", function () { before(async () => { const contractFactory = await hardhat.ethers.getContractFactory("AdminFacetTest"); - const contract = await contractFactory.deploy(); + const contract = await contractFactory.deploy(await 
contractFactory.signer.getChainId()); adminFacetTest = AdminFacetTestFactory.connect(contract.address, contract.signer); - const governanceContract = await contractFactory.deploy(); + const governanceContract = await contractFactory.deploy(await contractFactory.signer.getChainId()); + const governance = GovernanceFactory.connect(governanceContract.address, governanceContract.signer); await adminFacetTest.setPendingAdmin(governance.address); randomSigner = (await hardhat.ethers.getSigners())[1]; }); - it("StateTransitionManager successfully set validator", async () => { + it("ChainTypeManager successfully set validator", async () => { const validatorAddress = randomAddress(); await adminFacetTest.setValidator(validatorAddress, true); @@ -36,10 +37,10 @@ describe("Admin facet tests", function () { const revertReason = await getCallRevertReason( adminFacetTest.connect(randomSigner).setValidator(validatorAddress, true) ); - expect(revertReason).equal("Hyperchain: not state transition manager"); + expect(revertReason).contains("Unauthorized"); }); - it("StateTransitionManager successfully set porter availability", async () => { + it("ChainTypeManager successfully set porter availability", async () => { await adminFacetTest.setPorterAvailability(true); const porterAvailability = await adminFacetTest.getPorterAvailability(); @@ -48,10 +49,10 @@ describe("Admin facet tests", function () { it("random account fails to set porter availability", async () => { const revertReason = await getCallRevertReason(adminFacetTest.connect(randomSigner).setPorterAvailability(false)); - expect(revertReason).equal("Hyperchain: not state transition manager"); + expect(revertReason).contains("Unauthorized"); }); - it("StateTransitionManager successfully set priority transaction max gas limit", async () => { + it("ChainTypeManager successfully set priority transaction max gas limit", async () => { const gasLimit = "12345678"; await adminFacetTest.setPriorityTxMaxGasLimit(gasLimit); @@ -64,7 
+65,7 @@ describe("Admin facet tests", function () { const revertReason = await getCallRevertReason( adminFacetTest.connect(randomSigner).setPriorityTxMaxGasLimit(gasLimit) ); - expect(revertReason).equal("Hyperchain: not state transition manager"); + expect(revertReason).contains("Unauthorized"); }); describe("change admin", function () { @@ -92,7 +93,7 @@ describe("Admin facet tests", function () { it("failed to accept admin from not proposed account", async () => { const revertReason = await getCallRevertReason(adminFacetTest.connect(randomSigner).acceptAdmin()); - expect(revertReason).equal("n4"); + expect(revertReason).contains("Unauthorized"); }); it("accept admin from proposed account", async () => { diff --git a/l1-contracts/test/unit_tests/initial_deployment_test.spec.ts b/l1-contracts/test/unit_tests/initial_deployment_test.spec.ts index a70594304..532fd57d1 100644 --- a/l1-contracts/test/unit_tests/initial_deployment_test.spec.ts +++ b/l1-contracts/test/unit_tests/initial_deployment_test.spec.ts @@ -3,22 +3,32 @@ import * as ethers from "ethers"; import { Wallet } from "ethers"; import * as hardhat from "hardhat"; -import type { Bridgehub, StateTransitionManager } from "../../typechain"; -import { BridgehubFactory, StateTransitionManagerFactory } from "../../typechain"; +import type { Bridgehub, ChainTypeManager, L1NativeTokenVault, L1AssetRouter, L1Nullifier } from "../../typechain"; +import { + BridgehubFactory, + ChainTypeManagerFactory, + L1NativeTokenVaultFactory, + L1AssetRouterFactory, + L1NullifierFactory, +} from "../../typechain"; import { initialTestnetDeploymentProcess } from "../../src.ts/deploy-test-process"; import { ethTestConfig } from "../../src.ts/utils"; import type { Deployer } from "../../src.ts/deploy"; +import { registerZKChain } from "../../src.ts/deploy-process"; -describe("Initial deployment", function () { +describe("Initial deployment test", function () { let bridgehub: Bridgehub; - let stateTransition: 
StateTransitionManager; + let chainTypeManager: ChainTypeManager; let owner: ethers.Signer; let deployer: Deployer; // const MAX_CODE_LEN_WORDS = (1 << 16) - 1; // const MAX_CODE_LEN_BYTES = MAX_CODE_LEN_WORDS * 32; // let forwarder: Forwarder; + let l1NativeTokenVault: L1NativeTokenVault; + let l1AssetRouter: L1AssetRouter; + let l1Nullifier: L1Nullifier; let chainId = process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID || 270; before(async () => { @@ -47,22 +57,53 @@ describe("Initial deployment", function () { // await deploySharedBridgeOnL2ThroughL1(deployer, chainId.toString(), gasPrice); bridgehub = BridgehubFactory.connect(deployer.addresses.Bridgehub.BridgehubProxy, deployWallet); - stateTransition = StateTransitionManagerFactory.connect( + chainTypeManager = ChainTypeManagerFactory.connect( deployer.addresses.StateTransition.StateTransitionProxy, deployWallet ); + l1NativeTokenVault = L1NativeTokenVaultFactory.connect( + deployer.addresses.Bridges.NativeTokenVaultProxy, + deployWallet + ); + l1AssetRouter = L1AssetRouterFactory.connect(deployer.addresses.Bridges.SharedBridgeProxy, deployWallet); + l1Nullifier = L1NullifierFactory.connect(deployer.addresses.Bridges.L1NullifierProxy, deployWallet); }); it("Check addresses", async () => { - const stateTransitionManagerAddress1 = deployer.addresses.StateTransition.StateTransitionProxy; - const stateTransitionManagerAddress2 = await bridgehub.stateTransitionManager(chainId); - expect(stateTransitionManagerAddress1.toLowerCase()).equal(stateTransitionManagerAddress2.toLowerCase()); - - const stateTransitionAddress1 = deployer.addresses.StateTransition.DiamondProxy; - const stateTransitionAddress2 = await stateTransition.getHyperchain(chainId); - expect(stateTransitionAddress1.toLowerCase()).equal(stateTransitionAddress2.toLowerCase()); + const bridgehubAddress1 = deployer.addresses.Bridgehub.BridgehubProxy; + const bridgehubAddress2 = await l1AssetRouter.BRIDGE_HUB(); + const bridgehubAddress3 = await 
chainTypeManager.BRIDGE_HUB(); + expect(bridgehubAddress1.toLowerCase()).equal(bridgehubAddress2.toLowerCase()); + expect(bridgehubAddress1.toLowerCase()).equal(bridgehubAddress3.toLowerCase()); + + const chainTypeManagerAddress1 = deployer.addresses.StateTransition.StateTransitionProxy; + const chainTypeManagerAddress2 = await bridgehub.chainTypeManager(chainId); + expect(chainTypeManagerAddress1.toLowerCase()).equal(chainTypeManagerAddress2.toLowerCase()); + + const chainAddress2 = await chainTypeManager.getZKChain(chainId); + const chainAddress1 = deployer.addresses.StateTransition.DiamondProxy; + expect(chainAddress1.toLowerCase()).equal(chainAddress2.toLowerCase()); + + const chainAddress3 = await bridgehub.getZKChain(chainId); + expect(chainAddress1.toLowerCase()).equal(chainAddress3.toLowerCase()); + + const assetRouterAddress1 = deployer.addresses.Bridges.SharedBridgeProxy; + const assetRouterAddress2 = await bridgehub.sharedBridge(); + const assetRouterAddress3 = await l1NativeTokenVault.ASSET_ROUTER(); + const assetRouterAddress4 = await l1Nullifier.l1AssetRouter(); + expect(assetRouterAddress1.toLowerCase()).equal(assetRouterAddress2.toLowerCase()); + expect(assetRouterAddress1.toLowerCase()).equal(assetRouterAddress3.toLowerCase()); + expect(assetRouterAddress1.toLowerCase()).equal(assetRouterAddress4.toLowerCase()); + + const ntvAddress1 = deployer.addresses.Bridges.NativeTokenVaultProxy; + const ntvAddress2 = await l1Nullifier.l1NativeTokenVault(); + const ntvAddress3 = await l1AssetRouter.nativeTokenVault(); + expect(ntvAddress1.toLowerCase()).equal(ntvAddress2.toLowerCase()); + expect(ntvAddress1.toLowerCase()).equal(ntvAddress3.toLowerCase()); + }); - const stateTransitionAddress3 = await bridgehub.getHyperchain(chainId); - expect(stateTransitionAddress1.toLowerCase()).equal(stateTransitionAddress3.toLowerCase()); + it("Check L2SharedBridge", async () => { + const gasPrice = await owner.provider.getGasPrice(); + await registerZKChain(deployer, 
false, [], gasPrice, "", "0x33", true, true); }); }); diff --git a/l1-contracts/test/unit_tests/l1_shared_bridge_test.spec.ts b/l1-contracts/test/unit_tests/l1_shared_bridge_test.spec.ts index b53b12175..e1e17128b 100644 --- a/l1-contracts/test/unit_tests/l1_shared_bridge_test.spec.ts +++ b/l1-contracts/test/unit_tests/l1_shared_bridge_test.spec.ts @@ -1,16 +1,22 @@ import { expect } from "chai"; import { ethers, Wallet } from "ethers"; -import { Interface } from "ethers/lib/utils"; import * as hardhat from "hardhat"; -import type { L1SharedBridge, Bridgehub, WETH9 } from "../../typechain"; -import { L1SharedBridgeFactory, BridgehubFactory, WETH9Factory, TestnetERC20TokenFactory } from "../../typechain"; +import type { L1AssetRouter, Bridgehub, L1NativeTokenVault, MockExecutorFacet } from "../../typechain"; +import { + L1AssetRouterFactory, + BridgehubFactory, + TestnetERC20TokenFactory, + MockExecutorFacetFactory, +} from "../../typechain"; +import { L1NativeTokenVaultFactory } from "../../typechain/L1NativeTokenVaultFactory"; import { getTokens } from "../../src.ts/deploy-token"; -import { ADDRESS_ONE, ethTestConfig } from "../../src.ts/utils"; +import { Action, facetCut } from "../../src.ts/diamondCut"; +import { ethTestConfig } from "../../src.ts/utils"; import type { Deployer } from "../../src.ts/deploy"; import { initialTestnetDeploymentProcess } from "../../src.ts/deploy-test-process"; -import { getCallRevertReason, REQUIRED_L2_GAS_PRICE_PER_PUBDATA } from "./utils"; +import { getCallRevertReason, REQUIRED_L2_GAS_PRICE_PER_PUBDATA, DUMMY_MERKLE_PROOF_START } from "./utils"; describe("Shared Bridge tests", () => { let owner: ethers.Signer; @@ -18,12 +24,14 @@ describe("Shared Bridge tests", () => { let deployWallet: Wallet; let deployer: Deployer; let bridgehub: Bridgehub; - let l1SharedBridge: L1SharedBridge; - let l1SharedBridgeInterface: Interface; - let l1Weth: WETH9; + let l1NativeTokenVault: L1NativeTokenVault; + let proxyAsMockExecutor: 
MockExecutorFacet; + let l1SharedBridge: L1AssetRouter; let erc20TestToken: ethers.Contract; - const functionSignature = "0x6c0960f9"; + const mailboxFunctionSignature = "0x6c0960f9"; const ERC20functionSignature = "0x11a2ccc1"; + const dummyProof = Array(9).fill(ethers.constants.HashZero); + dummyProof[0] = DUMMY_MERKLE_PROOF_START; let chainId = process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID || 270; @@ -46,39 +54,52 @@ describe("Shared Bridge tests", () => { await owner.sendTransaction(tx); + const mockExecutorFactory = await hardhat.ethers.getContractFactory("MockExecutorFacet"); + const mockExecutorContract = await mockExecutorFactory.deploy(); + const extraFacet = facetCut(mockExecutorContract.address, mockExecutorContract.interface, Action.Add, true); + // note we can use initialTestnetDeploymentProcess so we don't go into deployment details here - deployer = await initialTestnetDeploymentProcess(deployWallet, ownerAddress, gasPrice, []); + deployer = await initialTestnetDeploymentProcess(deployWallet, ownerAddress, gasPrice, [extraFacet]); chainId = deployer.chainId; // prepare the bridge - l1SharedBridge = L1SharedBridgeFactory.connect(deployer.addresses.Bridges.SharedBridgeProxy, deployWallet); + proxyAsMockExecutor = MockExecutorFacetFactory.connect( + deployer.addresses.StateTransition.DiamondProxy, + mockExecutorContract.signer + ); + + await ( + await proxyAsMockExecutor.saveL2LogsRootHash( + 0, + "0x0000000000000000000000000000000000000000000000000000000000000001" + ) + ).wait(); + + l1SharedBridge = L1AssetRouterFactory.connect(deployer.addresses.Bridges.SharedBridgeProxy, deployWallet); bridgehub = BridgehubFactory.connect(deployer.addresses.Bridgehub.BridgehubProxy, deployWallet); - l1SharedBridgeInterface = new Interface(hardhat.artifacts.readArtifactSync("L1SharedBridge").abi); + l1NativeTokenVault = L1NativeTokenVaultFactory.connect( + deployer.addresses.Bridges.NativeTokenVaultProxy, + deployWallet + ); const tokens = getTokens(); - const 
l1WethTokenAddress = tokens.find((token: { symbol: string }) => token.symbol == "WETH")!.address; - l1Weth = WETH9Factory.connect(l1WethTokenAddress, owner); const tokenAddress = tokens.find((token: { symbol: string }) => token.symbol == "DAI")!.address; erc20TestToken = TestnetERC20TokenFactory.connect(tokenAddress, owner); await erc20TestToken.mint(await randomSigner.getAddress(), ethers.utils.parseUnits("10000", 18)); - await erc20TestToken.connect(randomSigner).approve(l1SharedBridge.address, ethers.utils.parseUnits("10000", 18)); - }); - - it("Check should initialize through governance", async () => { - const upgradeCall = l1SharedBridgeInterface.encodeFunctionData("initializeChainGovernance(uint256,address)", [ - chainId, - ADDRESS_ONE, - ]); - const txHash = await deployer.executeUpgrade(l1SharedBridge.address, 0, upgradeCall); + await erc20TestToken + .connect(randomSigner) + .approve(l1NativeTokenVault.address, ethers.utils.parseUnits("10000", 18)); - expect(txHash).not.equal(ethers.constants.HashZero); + await l1NativeTokenVault.registerToken(erc20TestToken.address); }); it("Should not allow depositing zero erc20 amount", async () => { const mintValue = ethers.utils.parseEther("0.01"); + await (await erc20TestToken.connect(randomSigner).approve(l1NativeTokenVault.address, mintValue.mul(10))).wait(); + const revertReason = await getCallRevertReason( bridgehub.connect(randomSigner).requestL2TransactionTwoBridges( { @@ -98,15 +119,20 @@ describe("Shared Bridge tests", () => { { value: mintValue } ) ); - expect(revertReason).equal("6T"); + expect(revertReason).contains("EmptyDeposit"); }); - it("Should deposit successfully", async () => { + it("Should deposit successfully legacy encoding", async () => { const amount = ethers.utils.parseEther("1"); const mintValue = ethers.utils.parseEther("2"); - await l1Weth.connect(randomSigner).deposit({ value: amount }); - await (await l1Weth.connect(randomSigner).approve(l1SharedBridge.address, amount)).wait(); - 
bridgehub.connect(randomSigner).requestL2TransactionTwoBridges( + + await erc20TestToken.connect(randomSigner).mint(await randomSigner.getAddress(), amount.mul(10)); + + const balanceBefore = await erc20TestToken.balanceOf(await randomSigner.getAddress()); + const balanceNTVBefore = await erc20TestToken.balanceOf(l1NativeTokenVault.address); + + await (await erc20TestToken.connect(randomSigner).approve(l1NativeTokenVault.address, amount.mul(10))).wait(); + await bridgehub.connect(randomSigner).requestL2TransactionTwoBridges( { chainId, mintValue, @@ -118,18 +144,24 @@ describe("Shared Bridge tests", () => { secondBridgeValue: 0, secondBridgeCalldata: new ethers.utils.AbiCoder().encode( ["address", "uint256", "address"], - [l1Weth.address, amount, await randomSigner.getAddress()] + [erc20TestToken.address, amount, await randomSigner.getAddress()] ), }, { value: mintValue } ); + const balanceAfter = await erc20TestToken.balanceOf(await randomSigner.getAddress()); + expect(balanceAfter).equal(balanceBefore.sub(amount)); + const balanceNTVAfter = await erc20TestToken.balanceOf(l1NativeTokenVault.address); + expect(balanceNTVAfter).equal(balanceNTVBefore.add(amount)); }); it("Should revert on finalizing a withdrawal with short message length", async () => { const revertReason = await getCallRevertReason( - l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 0, 0, 0, "0x", [ethers.constants.HashZero]) + l1SharedBridge + .connect(randomSigner) + .finalizeWithdrawal(chainId, 0, 0, 0, mailboxFunctionSignature, [ethers.constants.HashZero]) ); - expect(revertReason).equal("ShB wrong msg len"); + expect(revertReason).contains("L2WithdrawalMessageWrongLength"); }); it("Should revert on finalizing a withdrawal with wrong message length", async () => { @@ -141,49 +173,51 @@ describe("Shared Bridge tests", () => { 0, 0, 0, - ethers.utils.hexConcat([ERC20functionSignature, l1SharedBridge.address, ethers.utils.randomBytes(72 + 4)]), + 
ethers.utils.hexConcat([ERC20functionSignature, l1SharedBridge.address, mailboxFunctionSignature]), [ethers.constants.HashZero] ) ); - expect(revertReason).equal("ShB wrong msg len 2"); + expect(revertReason).contains("L2WithdrawalMessageWrongLength"); }); it("Should revert on finalizing a withdrawal with wrong function selector", async () => { const revertReason = await getCallRevertReason( l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 0, 0, 0, ethers.utils.randomBytes(96), []) ); - expect(revertReason).equal("ShB Incorrect message function selector"); + expect(revertReason).contains("InvalidSelector"); }); - it("Should deposit erc20 token successfully", async () => { - const amount = ethers.utils.parseEther("0.001"); - const mintValue = ethers.utils.parseEther("0.002"); - await l1Weth.connect(randomSigner).deposit({ value: amount }); - await (await l1Weth.connect(randomSigner).approve(l1SharedBridge.address, amount)).wait(); - bridgehub.connect(randomSigner).requestL2TransactionTwoBridges( - { - chainId, - mintValue, - l2Value: amount, - l2GasLimit: 1000000, - l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, - refundRecipient: ethers.constants.AddressZero, - secondBridgeAddress: l1SharedBridge.address, - secondBridgeValue: 0, - secondBridgeCalldata: new ethers.utils.AbiCoder().encode( - ["address", "uint256", "address"], - [l1Weth.address, amount, await randomSigner.getAddress()] - ), - }, - { value: mintValue } - ); - }); + // it("Should deposit erc20 token successfully", async () => { + // const amount = ethers.utils.parseEther("0.001"); + // const mintValue = ethers.utils.parseEther("0.002"); + // await l1Weth.connect(randomSigner).deposit({ value: amount }); + // await (await l1Weth.connect(randomSigner).approve(l1SharedBridge.address, amount)).wait(); + // bridgehub.connect(randomSigner).requestL2TransactionTwoBridges( + // { + // chainId, + // mintValue, + // l2Value: amount, + // l2GasLimit: 1000000, + // 
l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + // refundRecipient: ethers.constants.AddressZero, + // secondBridgeAddress: l1SharedBridge.address, + // secondBridgeValue: 0, + // secondBridgeCalldata: new ethers.utils.AbiCoder().encode( + // ["address", "uint256", "address"], + // [l1Weth.address, amount, await randomSigner.getAddress()] + // ), + // }, + // { value: mintValue } + // ); + // }); it("Should revert on finalizing a withdrawal with wrong message length", async () => { const revertReason = await getCallRevertReason( - l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 0, 0, 0, "0x", [ethers.constants.HashZero]) + l1SharedBridge + .connect(randomSigner) + .finalizeWithdrawal(chainId, 0, 0, 0, mailboxFunctionSignature, [ethers.constants.HashZero]) ); - expect(revertReason).equal("ShB wrong msg len"); + expect(revertReason).contains("L2WithdrawalMessageWrongLength"); }); it("Should revert on finalizing a withdrawal with wrong function signature", async () => { @@ -192,27 +226,27 @@ describe("Shared Bridge tests", () => { .connect(randomSigner) .finalizeWithdrawal(chainId, 0, 0, 0, ethers.utils.randomBytes(76), [ethers.constants.HashZero]) ); - expect(revertReason).equal("ShB Incorrect message function selector"); + expect(revertReason).contains("InvalidSelector"); }); it("Should revert on finalizing a withdrawal with wrong batch number", async () => { const l1Receiver = await randomSigner.getAddress(); const l2ToL1message = ethers.utils.hexConcat([ - functionSignature, + mailboxFunctionSignature, l1Receiver, erc20TestToken.address, ethers.constants.HashZero, ]); const revertReason = await getCallRevertReason( - l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 10, 0, 0, l2ToL1message, []) + l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 10, 0, 0, l2ToL1message, dummyProof) ); - expect(revertReason).equal("xx"); + expect(revertReason).contains("BatchNotExecuted"); }); it("Should revert on 
finalizing a withdrawal with wrong length of proof", async () => { const l1Receiver = await randomSigner.getAddress(); const l2ToL1message = ethers.utils.hexConcat([ - functionSignature, + mailboxFunctionSignature, l1Receiver, erc20TestToken.address, ethers.constants.HashZero, @@ -220,13 +254,13 @@ describe("Shared Bridge tests", () => { const revertReason = await getCallRevertReason( l1SharedBridge.connect(randomSigner).finalizeWithdrawal(chainId, 0, 0, 0, l2ToL1message, []) ); - expect(revertReason).equal("xc"); + expect(revertReason).contains("MerklePathEmpty"); }); it("Should revert on finalizing a withdrawal with wrong proof", async () => { const l1Receiver = await randomSigner.getAddress(); const l2ToL1message = ethers.utils.hexConcat([ - functionSignature, + mailboxFunctionSignature, l1Receiver, erc20TestToken.address, ethers.constants.HashZero, @@ -234,8 +268,8 @@ describe("Shared Bridge tests", () => { const revertReason = await getCallRevertReason( l1SharedBridge .connect(randomSigner) - .finalizeWithdrawal(chainId, 0, 0, 0, l2ToL1message, Array(9).fill(ethers.constants.HashZero)) + .finalizeWithdrawal(chainId, 0, 0, 0, l2ToL1message, [dummyProof[0], dummyProof[1]]) ); - expect(revertReason).equal("ShB withd w proof"); + expect(revertReason).contains("InvalidProof"); }); }); diff --git a/l1-contracts/test/unit_tests/l2-upgrade.test.spec.ts b/l1-contracts/test/unit_tests/l2-upgrade.test.spec.ts index 4ea71d99d..4d04f6695 100644 --- a/l1-contracts/test/unit_tests/l2-upgrade.test.spec.ts +++ b/l1-contracts/test/unit_tests/l2-upgrade.test.spec.ts @@ -5,7 +5,7 @@ import * as ethers from "ethers"; import * as hardhat from "hardhat"; import { hashBytecode } from "zksync-ethers/build/utils"; -import type { AdminFacet, ExecutorFacet, GettersFacet, StateTransitionManager } from "../../typechain"; +import type { AdminFacet, ExecutorFacet, GettersFacet, ChainTypeManager } from "../../typechain"; import { AdminFacetFactory, DummyAdminFacetFactory, @@ -13,7 +13,7 @@ 
import { DefaultUpgradeFactory, ExecutorFacetFactory, GettersFacetFactory, - StateTransitionManagerFactory, + ChainTypeManagerFactory, } from "../../typechain"; import { Ownable2StepFactory } from "../../typechain/Ownable2StepFactory"; @@ -27,6 +27,7 @@ import { diamondCut, Action, facetCut } from "../../src.ts/diamondCut"; import type { CommitBatchInfo, StoredBatchInfo, CommitBatchInfoWithTimestamp } from "./utils"; import { + encodeCommitBatchesData, L2_BOOTLOADER_ADDRESS, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS, @@ -39,15 +40,17 @@ import { buildCommitBatchInfoWithUpgrade, makeExecutedEqualCommitted, getBatchStoredInfo, + buildL2DARollupPubdataCommitment, + L2_TO_L1_MESSENGER, } from "./utils"; import { packSemver, unpackStringSemVer, addToProtocolVersion } from "../../scripts/utils"; -describe.only("L2 upgrade test", function () { +describe("L2 upgrade test", function () { let proxyExecutor: ExecutorFacet; let proxyAdmin: AdminFacet; let proxyGetters: GettersFacet; - let stateTransitionManager: StateTransitionManager; + let chainTypeManager: ChainTypeManager; let owner: ethers.Signer; @@ -113,14 +116,14 @@ describe.only("L2 upgrade test", function () { deployWallet ); - stateTransitionManager = StateTransitionManagerFactory.connect( + chainTypeManager = ChainTypeManagerFactory.connect( deployer.addresses.StateTransition.StateTransitionProxy, deployWallet ); await (await dummyAdminFacet.dummySetValidator(await deployWallet.getAddress())).wait(); - // do initial setChainIdUpgrade + // do initial GenesisUpgrade const upgradeTxHash = await proxyGetters.getL2SystemContractsUpgradeTxHash(); batch1InfoChainIdUpgrade = await buildCommitBatchInfoWithUpgrade( genesisStoredBatchInfo(), @@ -133,7 +136,10 @@ describe.only("L2 upgrade test", function () { ); const commitReceipt = await ( - await proxyExecutor.commitBatches(genesisStoredBatchInfo(), [batch1InfoChainIdUpgrade]) + await proxyExecutor.commitBatchesSharedBridge( + chainId, + 
...encodeCommitBatchesData(genesisStoredBatchInfo(), [batch1InfoChainIdUpgrade]) + ) ).wait(); const commitment = commitReceipt.events[0].args.commitment; storedBatch1InfoChainIdUpgrade = getBatchStoredInfo(batch1InfoChainIdUpgrade, commitment); @@ -148,7 +154,10 @@ describe.only("L2 upgrade test", function () { }); const commitReceipt = await ( - await proxyExecutor.commitBatches(storedBatch1InfoChainIdUpgrade, [batch2Info]) + await proxyExecutor.commitBatchesSharedBridge( + chainId, + ...encodeCommitBatchesData(storedBatch1InfoChainIdUpgrade, [batch2Info]) + ) ).wait(); const commitment = commitReceipt.events[0].args.commitment; @@ -156,7 +165,7 @@ describe.only("L2 upgrade test", function () { expect(await proxyGetters.getL2SystemContractsUpgradeTxHash()).to.equal(ethers.constants.HashZero); await ( - await executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + await executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 1, 0), l2ProtocolUpgradeTx: noopUpgradeTransaction, }) @@ -173,22 +182,22 @@ describe.only("L2 upgrade test", function () { const { 0: major, 1: minor, 2: patch } = await proxyGetters.getSemverProtocolVersion(); const bootloaderRevertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { newProtocolVersion: packSemver(major, minor, patch + 1), bootloaderHash: ethers.utils.hexlify(hashBytecode(ethers.utils.randomBytes(32))), l2ProtocolUpgradeTx: noopUpgradeTransaction, }) ); - expect(bootloaderRevertReason).to.equal("Patch only upgrade can not set new bootloader"); + expect(bootloaderRevertReason).to.contain("PatchUpgradeCantSetBootloader"); const defaultAccountRevertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, 
proxyGetters, chainTypeManager, proxyAdmin, { newProtocolVersion: packSemver(major, minor, patch + 1), defaultAccountHash: ethers.utils.hexlify(hashBytecode(ethers.utils.randomBytes(32))), l2ProtocolUpgradeTx: noopUpgradeTransaction, }) ); - expect(defaultAccountRevertReason).to.equal("Patch only upgrade can not set new default account"); + expect(defaultAccountRevertReason).to.contain("PatchUpgradeCantSetDefaultAccount"); }); it("Should not allow upgrade transaction during patch upgrade", async () => { @@ -200,12 +209,12 @@ describe.only("L2 upgrade test", function () { }); const bootloaderRevertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { newProtocolVersion: packSemver(major, minor, patch + 1), l2ProtocolUpgradeTx: someTx, }) ); - expect(bootloaderRevertReason).to.equal("Patch only upgrade can not set upgrade transaction"); + expect(bootloaderRevertReason).to.contain("PatchCantSetUpgradeTxn"); }); it("Should not allow major version change", async () => { @@ -218,30 +227,30 @@ describe.only("L2 upgrade test", function () { }); const bootloaderRevertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { newProtocolVersion: newVersion, l2ProtocolUpgradeTx: someTx, }) ); - expect(bootloaderRevertReason).to.equal("Major must always be 0"); + expect(bootloaderRevertReason).to.contain("NewProtocolMajorVersionNotZero"); }); it("Timestamp should behave correctly", async () => { // Upgrade was scheduled for now should work fine const timeNow = (await hardhat.ethers.provider.getBlock("latest")).timestamp; - await executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + await executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { upgradeTimestamp: 
ethers.BigNumber.from(timeNow), l2ProtocolUpgradeTx: noopUpgradeTransaction, }); // Upgrade that was scheduled for the future should not work now const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { upgradeTimestamp: ethers.BigNumber.from(timeNow).mul(2), l2ProtocolUpgradeTx: noopUpgradeTransaction, }) ); - expect(revertReason).to.equal("Upgrade is not ready yet"); + expect(revertReason).contains("TimeNotReached"); }); it("Should require correct tx type for upgrade tx", async () => { @@ -249,13 +258,13 @@ describe.only("L2 upgrade test", function () { txType: 255, }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 3, 0), }) ); - expect(revertReason).to.equal("L2 system upgrade tx type is wrong"); + expect(revertReason).contains("InvalidTxType"); }); it("Should include the new protocol version as part of nonce", async () => { @@ -265,13 +274,13 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 4, 0), }) ); - expect(revertReason).to.equal("The new protocol version should be included in the L2 system upgrade tx"); + expect(revertReason).contains("L2UpgradeNonceNotEqualToNewProtocolVersion"); }); it("Should ensure monotonic protocol version", async () => { @@ -281,13 +290,13 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - 
executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, newProtocolVersion: 0, }) ); - expect(revertReason).to.equal("New protocol version is not greater than the current one"); + expect(revertReason).contains("ProtocolVersionTooSmall"); }); it("Should ensure protocol version not increasing too much", async () => { @@ -297,13 +306,13 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 10000, 0), }) ); - expect(revertReason).to.equal("Too big protocol version difference"); + expect(revertReason).contains("ProtocolVersionMinorDeltaTooBig"); }); it("Should validate upgrade transaction overhead", async () => { @@ -313,13 +322,13 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 4, 0), }) ); - expect(revertReason).to.equal("my"); + expect(revertReason).contains("NotEnoughGas"); }); it("Should validate upgrade transaction gas max", async () => { @@ -329,13 +338,13 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 4, 0), }) ); - expect(revertReason).to.equal("ui"); + 
expect(revertReason).contains("TooMuchGas"); }); it("Should validate upgrade transaction cannot output more pubdata than processable", async () => { @@ -346,13 +355,13 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 4, 0), }) ); - expect(revertReason).to.equal("uk"); + expect(revertReason).contains("PubdataGreaterThanLimit"); }); it("Should validate factory deps", async () => { @@ -364,14 +373,14 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, factoryDeps: [myFactoryDep], newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 4, 0), }) ); - expect(revertReason).to.equal("Wrong factory dep hash"); + expect(revertReason).contains("L2BytecodeHashMismatch"); }); it("Should validate factory deps length match", async () => { @@ -382,14 +391,14 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, factoryDeps: [myFactoryDep], newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 4, 0), }) ); - expect(revertReason).to.equal("Wrong number of factory deps"); + expect(revertReason).contains("UnexpectedNumberOfFactoryDeps"); }); it("Should validate factory deps length isn't too large", async () => { @@ -402,14 +411,14 @@ describe.only("L2 upgrade test", function () { }); const revertReason = await 
getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { l2ProtocolUpgradeTx: wrongTx, factoryDeps: Array(33).fill(myFactoryDep), newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 4, 0), }) ); - expect(revertReason).to.equal("Factory deps can be at most 32"); + expect(revertReason).contains("TooManyFactoryDeps"); }); let l2UpgradeTxHash: string; @@ -444,7 +453,7 @@ describe.only("L2 upgrade test", function () { }; const upgradeReceipt = await ( - await executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, upgrade) + await executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, upgrade) ).wait(); const defaultUpgradeFactory = await hardhat.ethers.getContractFactory("DefaultUpgrade"); @@ -531,7 +540,7 @@ describe.only("L2 upgrade test", function () { }; const upgradeReceipt = await ( - await executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, upgrade) + await executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, upgrade) ).wait(); const defaultUpgradeFactory = await hardhat.ethers.getContractFactory("DefaultUpgrade"); @@ -609,29 +618,26 @@ describe.only("L2 upgrade test", function () { newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 5 + 1, 0), }; const revertReason = await getCallRevertReason( - executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, upgrade) + executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, upgrade) ); - await rollBackToVersion( - addToProtocolVersion(initialProtocolVersion, 5, 1).toString(), - stateTransitionManager, - upgrade - ); - expect(revertReason).to.equal("Previous upgrade has not been finalized"); + await rollBackToVersion(addToProtocolVersion(initialProtocolVersion, 5, 1).toString(), chainTypeManager, upgrade); + expect(revertReason).to.contains("PreviousUpgradeNotFinalized"); }); - 
it("Should require that the next commit batches contains an upgrade tx", async () => { - if (!l2UpgradeTxHash) { - throw new Error("Can not perform this test without l2UpgradeTxHash"); - } - - const batch3InfoNoUpgradeTx = await buildCommitBatchInfo(storedBatch2Info, { - batchNumber: 3, - }); - const revertReason = await getCallRevertReason( - proxyExecutor.commitBatches(storedBatch2Info, [batch3InfoNoUpgradeTx]) - ); - expect(revertReason).to.equal("b8"); - }); + // TODO: restore test + // it("Should require that the next commit batches contains an upgrade tx", async () => { + // if (!l2UpgradeTxHash) { + // throw new Error("Can not perform this test without l2UpgradeTxHash"); + // } + + // const batch3InfoNoUpgradeTx = await buildCommitBatchInfo(storedBatch2Info, { + // batchNumber: 3, + // }); + // const revertReason = await getCallRevertReason( + // proxyExecutor.commitBatchesSharedBridge(chainId, ...encodeCommitBatchesData(storedBatch2Info, [batch3InfoNoUpgradeTx])) + // ); + // expect(revertReason).to.contains("MissingSystemLogs"); + // }); it("Should ensure any additional upgrade logs go to the priority ops hash", async () => { if (!l2UpgradeTxHash) { @@ -639,6 +645,7 @@ describe.only("L2 upgrade test", function () { } const systemLogs = createSystemLogs(); + systemLogs.push( constructL2Log( true, @@ -669,10 +676,14 @@ describe.only("L2 upgrade test", function () { }, systemLogs ); + const revertReason = await getCallRevertReason( - proxyExecutor.commitBatches(storedBatch2Info, [batch3InfoNoUpgradeTx]) + proxyExecutor.commitBatchesSharedBridge( + chainId, + ...encodeCommitBatchesData(storedBatch2Info, [batch3InfoNoUpgradeTx]) + ) ); - expect(revertReason).to.equal("kp"); + expect(revertReason).to.contains("LogAlreadyProcessed"); }); it("Should fail to commit when upgrade tx hash does not match", async () => { @@ -703,9 +714,12 @@ describe.only("L2 upgrade test", function () { ); const revertReason = await getCallRevertReason( - 
proxyExecutor.commitBatches(storedBatch2Info, [batch3InfoTwoUpgradeTx]) + proxyExecutor.commitBatchesSharedBridge( + chainId, + ...encodeCommitBatchesData(storedBatch2Info, [batch3InfoTwoUpgradeTx]) + ) ); - expect(revertReason).to.equal("ut"); + expect(revertReason).to.contains("TxHashMismatch"); }); it("Should commit successfully when the upgrade tx is present", async () => { @@ -735,13 +749,18 @@ describe.only("L2 upgrade test", function () { systemLogs ); - await (await proxyExecutor.commitBatches(storedBatch2Info, [batch3InfoTwoUpgradeTx])).wait(); + await ( + await proxyExecutor.commitBatchesSharedBridge( + chainId, + ...encodeCommitBatchesData(storedBatch2Info, [batch3InfoTwoUpgradeTx]) + ) + ).wait(); expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(3); }); it("Should commit successfully when batch was reverted and reupgraded", async () => { - await (await proxyExecutor.revertBatches(2)).wait(); + await (await proxyExecutor.revertBatchesSharedBridge(chainId, 2)).wait(); const timestamp = (await hardhat.ethers.provider.getBlock("latest")).timestamp; const systemLogs = createSystemLogs(); systemLogs.push( @@ -768,7 +787,12 @@ describe.only("L2 upgrade test", function () { systemLogs ); - const commitReceipt = await (await proxyExecutor.commitBatches(storedBatch2Info, [batch3InfoTwoUpgradeTx])).wait(); + const commitReceipt = await ( + await proxyExecutor.commitBatchesSharedBridge( + chainId, + ...encodeCommitBatchesData(storedBatch2Info, [batch3InfoTwoUpgradeTx]) + ) + ).wait(); expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(3); const commitment = commitReceipt.events[0].args.commitment; @@ -781,7 +805,7 @@ describe.only("L2 upgrade test", function () { it("Should successfully commit a sequential upgrade", async () => { expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(0); await ( - await executeUpgrade(chainId, proxyGetters, stateTransitionManager, proxyAdmin, { + await 
executeUpgrade(chainId, proxyGetters, chainTypeManager, proxyAdmin, { newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 5 + 1, 0), l2ProtocolUpgradeTx: noopUpgradeTransaction, }) @@ -805,7 +829,12 @@ describe.only("L2 upgrade test", function () { systemLogs ); - const commitReceipt = await (await proxyExecutor.commitBatches(storedBatch2Info, [batch4InfoTwoUpgradeTx])).wait(); + const commitReceipt = await ( + await proxyExecutor.commitBatchesSharedBridge( + chainId, + ...encodeCommitBatchesData(storedBatch2Info, [batch4InfoTwoUpgradeTx]) + ) + ).wait(); const commitment = commitReceipt.events[0].args.commitment; const newBatchStoredInfo = getBatchStoredInfo(batch4InfoTwoUpgradeTx, commitment); @@ -820,7 +849,7 @@ describe.only("L2 upgrade test", function () { it("Should successfully commit custom upgrade", async () => { const upgradeReceipt = await ( - await executeCustomUpgrade(chainId, proxyGetters, proxyAdmin, stateTransitionManager, { + await executeCustomUpgrade(chainId, proxyGetters, proxyAdmin, chainTypeManager, { newProtocolVersion: addToProtocolVersion(initialProtocolVersion, 6 + 1, 0), l2ProtocolUpgradeTx: noopUpgradeTransaction, }) @@ -859,7 +888,12 @@ describe.only("L2 upgrade test", function () { systemLogs ); - const commitReceipt = await (await proxyExecutor.commitBatches(storedBatch2Info, [batch5InfoTwoUpgradeTx])).wait(); + const commitReceipt = await ( + await proxyExecutor.commitBatchesSharedBridge( + chainId, + ...encodeCommitBatchesData(storedBatch2Info, [batch5InfoTwoUpgradeTx]) + ) + ).wait(); const commitment = commitReceipt.events[0].args.commitment; const newBatchStoredInfo = getBatchStoredInfo(batch5InfoTwoUpgradeTx, commitment); @@ -876,7 +910,14 @@ async function buildCommitBatchInfo( info: CommitBatchInfoWithTimestamp ): Promise { const timestamp = info.timestamp || (await hardhat.ethers.provider.getBlock("latest")).timestamp; - const systemLogs = createSystemLogs(info.priorityOperationsHash, info.numberOfLayer1Txs, 
prevInfo.batchHash); + const [fullPubdataCommitment, l1DAOutputHash] = buildL2DARollupPubdataCommitment(ethers.constants.HashZero, "0x"); + + const systemLogs = createSystemLogs( + info.priorityOperationsHash, + info.numberOfLayer1Txs, + prevInfo.batchHash, + l1DAOutputHash + ); systemLogs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, @@ -891,7 +932,7 @@ async function buildCommitBatchInfo( numberOfLayer1Txs: 0, priorityOperationsHash: EMPTY_STRING_KECCAK, systemLogs: ethers.utils.hexConcat(systemLogs), - pubdataCommitments: `0x${"0".repeat(130)}`, + operatorDAInput: fullPubdataCommitment, bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), eventsQueueStateHash: ethers.utils.randomBytes(32), ...info, @@ -904,12 +945,20 @@ async function buildCommitBatchInfoWithCustomLogs( systemLogs: string[] ): Promise { const timestamp = info.timestamp || (await hardhat.ethers.provider.getBlock("latest")).timestamp; + const [fullPubdataCommitment, l1DAOutputHash] = buildL2DARollupPubdataCommitment(ethers.constants.HashZero, "0x"); + systemLogs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, packBatchTimestampAndBatchTimestamp(timestamp, timestamp) ); + systemLogs[SYSTEM_LOG_KEYS.L2_DA_VALIDATOR_OUTPUT_HASH_KEY] = constructL2Log( + true, + L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS.L2_DA_VALIDATOR_OUTPUT_HASH_KEY, + l1DAOutputHash + ); return { timestamp, @@ -918,7 +967,7 @@ async function buildCommitBatchInfoWithCustomLogs( numberOfLayer1Txs: 0, priorityOperationsHash: EMPTY_STRING_KECCAK, systemLogs: ethers.utils.hexConcat(systemLogs), - pubdataCommitments: `0x${"0".repeat(130)}`, + operatorDAInput: fullPubdataCommitment, bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), eventsQueueStateHash: ethers.utils.randomBytes(32), ...info, @@ -956,7 +1005,7 @@ function 
buildProposeUpgrade(proposedUpgrade: PartialProposedUpgrade): ProposedU async function executeUpgrade( chainId: BigNumberish, proxyGetters: GettersFacet, - stateTransitionManager: StateTransitionManager, + chainTypeManager: ChainTypeManager, proxyAdmin: AdminFacet, partialUpgrade: Partial, contractFactory?: ethers.ethers.ContractFactory @@ -982,7 +1031,7 @@ async function executeUpgrade( const oldProtocolVersion = await proxyGetters.getProtocolVersion(); // This promise will be handled in the tests ( - await stateTransitionManager.setNewVersionUpgrade( + await chainTypeManager.setNewVersionUpgrade( diamondCutData, oldProtocolVersion, 999999999999, @@ -995,7 +1044,7 @@ async function executeUpgrade( // we rollback the protocolVersion ( we don't clear the upgradeHash mapping, but that is ok) async function rollBackToVersion( protocolVersion: string, - stateTransition: StateTransitionManager, + stateTransition: ChainTypeManager, partialUpgrade: Partial ) { partialUpgrade.newProtocolVersion = protocolVersion; @@ -1026,7 +1075,7 @@ async function executeCustomUpgrade( chainId: BigNumberish, proxyGetters: GettersFacet, proxyAdmin: AdminFacet, - stateTransition: StateTransitionManager, + stateTransition: ChainTypeManager, partialUpgrade: Partial, contractFactory?: ethers.ethers.ContractFactory ) { diff --git a/l1-contracts/test/unit_tests/legacy_era_test.spec.ts b/l1-contracts/test/unit_tests/legacy_era_test.spec.ts index 92a86dc7d..5a0d6e995 100644 --- a/l1-contracts/test/unit_tests/legacy_era_test.spec.ts +++ b/l1-contracts/test/unit_tests/legacy_era_test.spec.ts @@ -1,22 +1,23 @@ import { expect } from "chai"; import { ethers, Wallet } from "ethers"; import * as hardhat from "hardhat"; +import type { BytesLike } from "ethers/lib/utils"; import { Interface } from "ethers/lib/utils"; -import type { Bridgehub, L1SharedBridge, GettersFacet, MockExecutorFacet } from "../../typechain"; +import type { Bridgehub, GettersFacet, MockExecutorFacet } from "../../typechain"; import 
{ - L1SharedBridgeFactory, BridgehubFactory, TestnetERC20TokenFactory, MailboxFacetFactory, GettersFacetFactory, MockExecutorFacetFactory, + L1NullifierFactory, } from "../../typechain"; import type { IL1ERC20Bridge } from "../../typechain/IL1ERC20Bridge"; import { IL1ERC20BridgeFactory } from "../../typechain/IL1ERC20BridgeFactory"; import type { IMailbox } from "../../typechain/IMailbox"; -import { ADDRESS_ONE, ethTestConfig } from "../../src.ts/utils"; +import { ethTestConfig } from "../../src.ts/utils"; import { Action, facetCut } from "../../src.ts/diamondCut"; import { getTokens } from "../../src.ts/deploy-token"; import type { Deployer } from "../../src.ts/deploy"; @@ -28,6 +29,8 @@ import { L2_TO_L1_MESSENGER, getCallRevertReason, requestExecuteDirect, + DUMMY_MERKLE_PROOF_START, + DUMMY_MERKLE_PROOF_2_START, } from "./utils"; // This test is mimicking the legacy Era functions. Era's Address was known at the upgrade, so we hardcoded them in the contracts, @@ -41,17 +44,20 @@ describe("Legacy Era tests", function () { let deployer: Deployer; let l1ERC20BridgeAddress: string; let l1ERC20Bridge: IL1ERC20Bridge; - let sharedBridgeProxy: L1SharedBridge; + // let sharedBridgeProxy: L1AssetRouter; let erc20TestToken: ethers.Contract; let bridgehub: Bridgehub; let chainId = "9"; // Hardhat config ERA_CHAIN_ID const functionSignature = "0x11a2ccc1"; + let l2ToL1message: BytesLike; let mailbox: IMailbox; let getter: GettersFacet; let proxyAsMockExecutor: MockExecutorFacet; const MAX_CODE_LEN_WORDS = (1 << 16) - 1; const MAX_CODE_LEN_BYTES = MAX_CODE_LEN_WORDS * 32; + const dummyProof = Array(9).fill(ethers.constants.HashZero); + dummyProof[0] = DUMMY_MERKLE_PROOF_START; before(async () => { [owner, randomSigner] = await hardhat.ethers.getSigners(); @@ -85,7 +91,7 @@ describe("Legacy Era tests", function () { l1ERC20BridgeAddress = deployer.addresses.Bridges.ERC20BridgeProxy; l1ERC20Bridge = IL1ERC20BridgeFactory.connect(l1ERC20BridgeAddress, deployWallet); - 
sharedBridgeProxy = L1SharedBridgeFactory.connect(deployer.addresses.Bridges.SharedBridgeProxy, deployWallet); + // sharedBridgeProxy = L1AssetRouterFactory.connect(deployer.addresses.Bridges.SharedBridgeProxy, deployWallet); const tokens = getTokens(); const tokenAddress = tokens.find((token: { symbol: string }) => token.symbol == "DAI")!.address; @@ -94,16 +100,19 @@ describe("Legacy Era tests", function () { await erc20TestToken.mint(await randomSigner.getAddress(), ethers.utils.parseUnits("10000", 18)); await erc20TestToken.connect(randomSigner).approve(l1ERC20BridgeAddress, ethers.utils.parseUnits("10000", 18)); - const sharedBridgeFactory = await hardhat.ethers.getContractFactory("L1SharedBridge"); + const sharedBridgeFactory = await hardhat.ethers.getContractFactory("L1AssetRouter"); const l1WethToken = tokens.find((token: { symbol: string }) => token.symbol == "WETH")!.address; const sharedBridge = await sharedBridgeFactory.deploy( l1WethToken, deployer.addresses.Bridgehub.BridgehubProxy, + deployer.addresses.Bridges.L1NullifierProxy, deployer.chainId, deployer.addresses.StateTransition.DiamondProxy ); - const proxyAdminInterface = new Interface(hardhat.artifacts.readArtifactSync("ProxyAdmin").abi); + const proxyAdminInterface = new Interface( + hardhat.artifacts.readArtifactSync("@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol:ProxyAdmin").abi + ); const calldata = proxyAdminInterface.encodeFunctionData("upgrade(address,address)", [ deployer.addresses.Bridges.SharedBridgeProxy, sharedBridge.address, @@ -111,9 +120,16 @@ describe("Legacy Era tests", function () { await deployer.executeUpgrade(deployer.addresses.TransparentProxyAdmin, 0, calldata); if (deployer.verbose) { - console.log("L1SharedBridge upgrade sent for testing"); + console.log("L1AssetRouter upgrade sent for testing"); } + const setL1Erc20BridgeCalldata = L1NullifierFactory.connect( + deployer.addresses.Bridges.L1NullifierProxy, + deployWallet + 
).interface.encodeFunctionData("setL1Erc20Bridge", [l1ERC20Bridge.address]); + + await deployer.executeUpgrade(deployer.addresses.Bridges.L1NullifierProxy, 0, setL1Erc20BridgeCalldata); + mailbox = MailboxFacetFactory.connect(deployer.addresses.StateTransition.DiamondProxy, deployWallet); getter = GettersFacetFactory.connect(deployer.addresses.StateTransition.DiamondProxy, deployWallet); @@ -121,18 +137,24 @@ describe("Legacy Era tests", function () { deployer.addresses.StateTransition.DiamondProxy, mockExecutorContract.signer ); - }); - it("Check should initialize through governance", async () => { - const l1SharedBridgeInterface = new Interface(hardhat.artifacts.readArtifactSync("L1SharedBridge").abi); - const upgradeCall = l1SharedBridgeInterface.encodeFunctionData("initializeChainGovernance(uint256,address)", [ - chainId, - ADDRESS_ONE, - ]); + await ( + await proxyAsMockExecutor.saveL2LogsRootHash( + 1, + "0x0000000000000000000000000000000000000000000000000000000000000001" + ) + ).wait(); - const txHash = await deployer.executeUpgrade(sharedBridgeProxy.address, 0, upgradeCall); + const txExecute = await proxyAsMockExecutor.setExecutedBatches(1); + await txExecute.wait(); - expect(txHash).not.equal(ethers.constants.HashZero); + const l1Receiver = await randomSigner.getAddress(); + l2ToL1message = ethers.utils.hexConcat([ + functionSignature, + l1Receiver, + erc20TestToken.address, + ethers.constants.HashZero, + ]); }); it("Should not allow depositing zero amount", async () => { @@ -142,7 +164,7 @@ describe("Legacy Era tests", function () { "deposit(address,address,uint256,uint256,uint256,address)" ](await randomSigner.getAddress(), erc20TestToken.address, 0, 0, 0, ethers.constants.AddressZero) ); - expect(revertReason).equal("0T"); + expect(revertReason).contains("EmptyDeposit"); }); it("Should deposit successfully", async () => { @@ -151,6 +173,7 @@ describe("Legacy Era tests", function () { l1ERC20Bridge.connect(randomSigner), bridgehub, chainId, + 
deployer.l1ChainId, depositorAddress, erc20TestToken.address, ethers.utils.parseUnits("800", 18), @@ -159,33 +182,29 @@ describe("Legacy Era tests", function () { }); it("Should revert on finalizing a withdrawal with wrong message length", async () => { + const mailboxFunctionSignature = "0x6c0960f9"; const revertReason = await getCallRevertReason( - l1ERC20Bridge.connect(randomSigner).finalizeWithdrawal(0, 0, 0, "0x", [ethers.constants.HashZero]) + l1ERC20Bridge + .connect(randomSigner) + .finalizeWithdrawal(1, 0, 0, mailboxFunctionSignature, [ethers.constants.HashZero]) ); - expect(revertReason).equal("ShB wrong msg len"); + expect(revertReason).contains("L2WithdrawalMessageWrongLength(4)"); }); it("Should revert on finalizing a withdrawal with wrong function signature", async () => { const revertReason = await getCallRevertReason( l1ERC20Bridge .connect(randomSigner) - .finalizeWithdrawal(0, 0, 0, ethers.utils.randomBytes(76), [ethers.constants.HashZero]) + .finalizeWithdrawal(1, 0, 0, ethers.utils.randomBytes(76), [ethers.constants.HashZero]) ); - expect(revertReason).equal("ShB Incorrect message function selector"); + expect(revertReason).contains("InvalidSelector"); }); it("Should revert on finalizing a withdrawal with wrong batch number", async () => { - const l1Receiver = await randomSigner.getAddress(); - const l2ToL1message = ethers.utils.hexConcat([ - functionSignature, - l1Receiver, - erc20TestToken.address, - ethers.constants.HashZero, - ]); const revertReason = await getCallRevertReason( - l1ERC20Bridge.connect(randomSigner).finalizeWithdrawal(10, 0, 0, l2ToL1message, []) + l1ERC20Bridge.connect(randomSigner).finalizeWithdrawal(10, 0, 0, l2ToL1message, dummyProof) ); - expect(revertReason).equal("xx"); + expect(revertReason).contains("BatchNotExecuted"); }); it("Should revert on finalizing a withdrawal with wrong length of proof", async () => { @@ -196,26 +215,14 @@ describe("Legacy Era tests", function () { erc20TestToken.address, 
ethers.constants.HashZero, ]); - const revertReason = await getCallRevertReason( - l1ERC20Bridge.connect(randomSigner).finalizeWithdrawal(0, 0, 0, l2ToL1message, []) - ); - expect(revertReason).equal("xc"); + await expect(l1ERC20Bridge.connect(randomSigner).finalizeWithdrawal(0, 0, 0, l2ToL1message, [])).to.be.reverted; }); it("Should revert on finalizing a withdrawal with wrong proof", async () => { - const l1Receiver = await randomSigner.getAddress(); - const l2ToL1message = ethers.utils.hexConcat([ - functionSignature, - l1Receiver, - erc20TestToken.address, - ethers.constants.HashZero, - ]); const revertReason = await getCallRevertReason( - l1ERC20Bridge - .connect(randomSigner) - .finalizeWithdrawal(0, 0, 0, l2ToL1message, Array(9).fill(ethers.constants.HashZero)) + l1ERC20Bridge.connect(randomSigner).finalizeWithdrawal(1, 0, 0, l2ToL1message, dummyProof) ); - expect(revertReason).equal("ShB withd w proof"); + expect(revertReason).contains("InvalidProof"); }); /////////// Mailbox. Note we have these two together because we need to fix ERA Diamond proxy Address @@ -237,7 +244,7 @@ describe("Legacy Era tests", function () { ) ); - expect(revertReason).equal("pp"); + expect(revertReason).contains("MalformedBytecode"); }); describe("finalizeEthWithdrawal", function () { @@ -257,6 +264,7 @@ describe("Legacy Era tests", function () { ); const MERKLE_PROOF = [ + DUMMY_MERKLE_PROOF_2_START, "0x72abee45b59e344af8a6e520241c4744aff26ed411f4c4b00f8af09adada43ba", "0xc3d03eebfd83049991ea3d3e358b6712e7aa2e2e63dc2d4b438987cec28ac8d0", "0xe3697c7f33c31a9b0f0aeb8542287d0d21e8c4cf82163d0c44c7a98aa11aa111", @@ -269,7 +277,7 @@ describe("Legacy Era tests", function () { ]; let L2_LOGS_TREE_ROOT = HASHED_LOG; - for (let i = 0; i < MERKLE_PROOF.length; i++) { + for (let i = 1; i < MERKLE_PROOF.length; i++) { L2_LOGS_TREE_ROOT = ethers.utils.keccak256(L2_LOGS_TREE_ROOT + MERKLE_PROOF[i].slice(2)); } @@ -279,12 +287,12 @@ describe("Legacy Era tests", function () { it("Reverts when 
proof is invalid", async () => { const invalidProof = [...MERKLE_PROOF]; - invalidProof[0] = "0x72abee45b59e344af8a6e520241c4744aff26ed411f4c4b00f8af09adada43bb"; + invalidProof[1] = "0x72abee45b59e344af8a6e520241c4744aff26ed411f4c4b00f8af09adada43bb"; const revertReason = await getCallRevertReason( mailbox.finalizeEthWithdrawal(BLOCK_NUMBER, MESSAGE_INDEX, TX_NUMBER_IN_BLOCK, MESSAGE, invalidProof) ); - expect(revertReason).equal("ShB withd w proof"); + expect(revertReason).contains("InvalidProof"); }); it("Successful deposit", async () => { @@ -306,7 +314,6 @@ describe("Legacy Era tests", function () { it("Successful withdrawal", async () => { const balanceBefore = await hardhat.ethers.provider.getBalance(L1_RECEIVER); - await mailbox.finalizeEthWithdrawal(BLOCK_NUMBER, MESSAGE_INDEX, TX_NUMBER_IN_BLOCK, MESSAGE, MERKLE_PROOF); const balanceAfter = await hardhat.ethers.provider.getBalance(L1_RECEIVER); expect(balanceAfter.sub(balanceBefore)).equal(AMOUNT); @@ -316,7 +323,7 @@ describe("Legacy Era tests", function () { const revertReason = await getCallRevertReason( mailbox.finalizeEthWithdrawal(BLOCK_NUMBER, MESSAGE_INDEX, TX_NUMBER_IN_BLOCK, MESSAGE, MERKLE_PROOF) ); - expect(revertReason).equal("Withdrawal is already finalized"); + expect(revertReason).contains("WithdrawalAlreadyFinalized"); }); }); }); diff --git a/l1-contracts/test/unit_tests/mailbox_test.spec.ts b/l1-contracts/test/unit_tests/mailbox_test.spec.ts index 7210ccc44..c78cc646d 100644 --- a/l1-contracts/test/unit_tests/mailbox_test.spec.ts +++ b/l1-contracts/test/unit_tests/mailbox_test.spec.ts @@ -105,7 +105,7 @@ describe("Mailbox tests", function () { ) ); - expect(revertReason).equal("pq"); + expect(revertReason).contains("LengthIsNotDivisibleBy32(63)"); }); it("Should not accept bytecode of even length in words", async () => { @@ -122,7 +122,7 @@ describe("Mailbox tests", function () { ) ); - expect(revertReason).equal("ps"); + expect(revertReason).contains("MalformedBytecode"); }); 
describe("finalizeEthWithdrawal", function () { @@ -167,21 +167,21 @@ describe("Mailbox tests", function () { const revertReason = await getCallRevertReason( mailbox.finalizeEthWithdrawal(BLOCK_NUMBER, MESSAGE_INDEX, TX_NUMBER_IN_BLOCK, MESSAGE, invalidProof) ); - expect(revertReason).equal("Mailbox: finalizeEthWithdrawal only available for Era on mailbox"); + expect(revertReason).contains("OnlyEraSupported"); }); it("Successful withdrawal", async () => { const revertReason = await getCallRevertReason( mailbox.finalizeEthWithdrawal(BLOCK_NUMBER, MESSAGE_INDEX, TX_NUMBER_IN_BLOCK, MESSAGE, MERKLE_PROOF) ); - expect(revertReason).equal("Mailbox: finalizeEthWithdrawal only available for Era on mailbox"); + expect(revertReason).contains("OnlyEraSupported"); }); it("Reverts when withdrawal is already finalized", async () => { const revertReason = await getCallRevertReason( mailbox.finalizeEthWithdrawal(BLOCK_NUMBER, MESSAGE_INDEX, TX_NUMBER_IN_BLOCK, MESSAGE, MERKLE_PROOF) ); - expect(revertReason).equal("Mailbox: finalizeEthWithdrawal only available for Era on mailbox"); + expect(revertReason).contains("OnlyEraSupported"); }); }); @@ -199,7 +199,10 @@ describe("Mailbox tests", function () { before(async () => { const mailboxTestContractFactory = await hardhat.ethers.getContractFactory("MailboxFacetTest"); - const mailboxTestContract = await mailboxTestContractFactory.deploy(chainId); + const mailboxTestContract = await mailboxTestContractFactory.deploy( + chainId, + await mailboxTestContractFactory.signer.getChainId() + ); testContract = MailboxFacetTestFactory.connect(mailboxTestContract.address, mailboxTestContract.signer); // Generating 10 more gas prices for test suit @@ -329,7 +332,7 @@ describe("Mailbox tests", function () { for (const [refundRecipient, externallyOwned] of refundRecipients) { const result = await sendTransaction(refundRecipient); - const [, event2] = (await result.transaction.wait()).logs; + const [, , event2] = (await 
result.transaction.wait()).logs; const parsedEvent = mailbox.interface.parseLog(event2); expect(parsedEvent.name).to.equal("NewPriorityRequest"); diff --git a/l1-contracts/test/unit_tests/proxy_test.spec.ts b/l1-contracts/test/unit_tests/proxy_test.spec.ts index 3c3ae6429..46067e16f 100644 --- a/l1-contracts/test/unit_tests/proxy_test.spec.ts +++ b/l1-contracts/test/unit_tests/proxy_test.spec.ts @@ -45,7 +45,7 @@ describe("Diamond proxy tests", function () { diamondInit = DiamondInitFactory.connect(diamondInitContract.address, diamondInitContract.signer); const adminFactory = await hardhat.ethers.getContractFactory("AdminFacet"); - const adminContract = await adminFactory.deploy(); + const adminContract = await adminFactory.deploy(await owner.getChainId()); adminFacet = AdminFacetFactory.connect(adminContract.address, adminContract.signer); const gettersFacetFactory = await hardhat.ethers.getContractFactory("GettersFacet"); @@ -53,7 +53,7 @@ describe("Diamond proxy tests", function () { gettersFacet = GettersFacetFactory.connect(gettersFacetContract.address, gettersFacetContract.signer); const mailboxFacetFactory = await hardhat.ethers.getContractFactory("MailboxFacet"); - const mailboxFacetContract = await mailboxFacetFactory.deploy(chainId); + const mailboxFacetContract = await mailboxFacetFactory.deploy(chainId, await owner.getChainId()); mailboxFacet = MailboxFacetFactory.connect(mailboxFacetContract.address, mailboxFacetContract.signer); const executorFactory = await hardhat.ethers.getContractFactory("ExecutorFacet"); @@ -63,6 +63,9 @@ describe("Diamond proxy tests", function () { const diamondProxyTestFactory = await hardhat.ethers.getContractFactory("DiamondProxyTest"); const diamondProxyTestContract = await diamondProxyTestFactory.deploy(); + const dummyBridgehubFactory = await hardhat.ethers.getContractFactory("DummyBridgehub"); + const dummyBridgehub = await dummyBridgehubFactory.deploy(); + diamondProxyTest = DiamondProxyTestFactory.connect( 
diamondProxyTestContract.address, diamondProxyTestContract.signer @@ -84,12 +87,12 @@ describe("Diamond proxy tests", function () { const diamondInitCalldata = diamondInit.interface.encodeFunctionData("initialize", [ { chainId, - bridgehub: "0x0000000000000000000000000000000000000001", - stateTransitionManager: await owner.getAddress(), + bridgehub: dummyBridgehub.address, + chainTypeManager: await owner.getAddress(), protocolVersion: 0, admin: governorAddress, validatorTimelock: governorAddress, - baseToken: "0x0000000000000000000000000000000000000001", + baseTokenAssetId: "0x0000000000000000000000000000000000000000000000000000000000000001", baseTokenBridge: "0x0000000000000000000000000000000000000001", storedBatchZero: "0x02c775f0a90abf7a0e8043f2fdc38f0580ca9f9996a895d05a501bfeaa3b2e21", verifier: "0x0000000000000000000000000000000000000001", @@ -134,14 +137,14 @@ describe("Diamond proxy tests", function () { const proxyAsERC20 = TestnetERC20TokenFactory.connect(proxy.address, proxy.signer); const revertReason = await getCallRevertReason(proxyAsERC20.transfer(proxyAsERC20.address, 0)); - expect(revertReason).equal("F"); + expect(revertReason).contains("F"); }); it("check that proxy reject data with no selector", async () => { const dataWithoutSelector = "0x1122"; const revertReason = await getCallRevertReason(proxy.fallback({ data: dataWithoutSelector })); - expect(revertReason).equal("Ut"); + expect(revertReason).contains("Ut"); }); it("should freeze the diamond storage", async () => { @@ -178,7 +181,7 @@ describe("Diamond proxy tests", function () { data: executorFacetSelector3 + "0000000000000000000000000000000000000000000000000000000000000000", }) ); - expect(revertReason).equal("q1"); + expect(revertReason).contains("q1"); }); it("should be able to call an unfreezable facet when diamondStorage is frozen", async () => { diff --git a/l1-contracts/test/unit_tests/utils.ts b/l1-contracts/test/unit_tests/utils.ts index 2bbf51733..af74fa28f 100644 --- 
a/l1-contracts/test/unit_tests/utils.ts +++ b/l1-contracts/test/unit_tests/utils.ts @@ -2,7 +2,6 @@ import * as hardhat from "hardhat"; import type { BigNumberish, BytesLike } from "ethers"; import { BigNumber, ethers } from "ethers"; import type { Address } from "zksync-ethers/build/types"; -import { REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT } from "zksync-ethers/build/utils"; import type { IBridgehub } from "../../typechain/IBridgehub"; import type { IL1ERC20Bridge } from "../../typechain/IL1ERC20Bridge"; @@ -11,15 +10,25 @@ import type { IMailbox } from "../../typechain/IMailbox"; import type { ExecutorFacet } from "../../typechain"; import type { FeeParams, L2CanonicalTransaction } from "../../src.ts/utils"; -import { ADDRESS_ONE, PubdataPricingMode, EMPTY_STRING_KECCAK } from "../../src.ts/utils"; +import { + ADDRESS_ONE, + PubdataPricingMode, + EMPTY_STRING_KECCAK, + STORED_BATCH_INFO_ABI_STRING, + COMMIT_BATCH_INFO_ABI_STRING, + PRIORITY_OPS_BATCH_INFO_ABI_STRING, +} from "../../src.ts/utils"; import { packSemver } from "../../scripts/utils"; +import { keccak256, hexConcat, defaultAbiCoder } from "ethers/lib/utils"; export const CONTRACTS_GENESIS_PROTOCOL_VERSION = packSemver(0, 21, 0).toString(); // eslint-disable-next-line @typescript-eslint/no-var-requires -export const IERC20_INTERFACE = require("@openzeppelin/contracts/build/contracts/IERC20"); +export const IERC20_INTERFACE = require("@openzeppelin/contracts-v4/build/contracts/IERC20"); export const DEFAULT_REVERT_REASON = "VM did not revert"; export const DEFAULT_L2_LOGS_TREE_ROOT_HASH = "0x0000000000000000000000000000000000000000000000000000000000000000"; +export const DUMMY_MERKLE_PROOF_START = "0x0101000000000000000000000000000000000000000000000000000000000000"; +export const DUMMY_MERKLE_PROOF_2_START = "0x0109000000000000000000000000000000000000000000000000000000000000"; export const L2_SYSTEM_CONTEXT_ADDRESS = "0x000000000000000000000000000000000000800b"; export const L2_BOOTLOADER_ADDRESS = 
"0x0000000000000000000000000000000000008001"; export const L2_KNOWN_CODE_STORAGE_ADDRESS = "0x0000000000000000000000000000000000008004"; @@ -28,7 +37,6 @@ export const L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR = "0x000000000000000000000000000 export const L2_BYTECODE_COMPRESSOR_ADDRESS = "0x000000000000000000000000000000000000800e"; export const DEPLOYER_SYSTEM_CONTRACT_ADDRESS = "0x0000000000000000000000000000000000008006"; export const PUBDATA_CHUNK_PUBLISHER_ADDRESS = "0x0000000000000000000000000000000000008011"; -const PUBDATA_HASH = "0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563"; export const SYSTEM_UPGRADE_TX_TYPE = 254; @@ -38,18 +46,12 @@ export function randomAddress() { export enum SYSTEM_LOG_KEYS { L2_TO_L1_LOGS_TREE_ROOT_KEY, - TOTAL_L2_TO_L1_PUBDATA_KEY, - STATE_DIFF_HASH_KEY, PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, PREV_BATCH_HASH_KEY, CHAINED_PRIORITY_TXN_HASH_KEY, NUMBER_OF_LAYER_1_TXS_KEY, - BLOB_ONE_HASH_KEY, - BLOB_TWO_HASH_KEY, - BLOB_THREE_HASH_KEY, - BLOB_FOUR_HASH_KEY, - BLOB_FIVE_HASH_KEY, - BLOB_SIX_HASH_KEY, + L2_DA_VALIDATOR_OUTPUT_HASH_KEY, + USED_L2_DA_VALIDATOR_ADDRESS_KEY, EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY, } @@ -98,7 +100,21 @@ export async function getCallRevertReason(promise) { } } } catch (_) { - throw e; + try { + if ( + revertReason === "cannot estimate gas; transaction may fail or may require manual gas limit" || + revertReason === DEFAULT_REVERT_REASON + ) { + if (e.error) { + revertReason = + e.error.toString().match(/reverted with custom error '([^']*)'/)[1] || "PLACEHOLDER_STRING"; + } else { + revertReason = e.toString().match(/reverted with custom error '([^']*)'/)[1] || "PLACEHOLDER_STRING"; + } + } + } catch (_) { + throw e; + } } } } @@ -196,12 +212,11 @@ export function constructL2Log(isService: boolean, sender: string, key: number | export function createSystemLogs( chainedPriorityTxHashKey?: BytesLike, numberOfLayer1Txs?: BigNumberish, - previousBatchHash?: BytesLike + previousBatchHash?: 
BytesLike, + l2DaValidatorOutputHash?: BytesLike ) { return [ constructL2Log(true, L2_TO_L1_MESSENGER, SYSTEM_LOG_KEYS.L2_TO_L1_LOGS_TREE_ROOT_KEY, ethers.constants.HashZero), - constructL2Log(true, L2_TO_L1_MESSENGER, SYSTEM_LOG_KEYS.TOTAL_L2_TO_L1_PUBDATA_KEY, PUBDATA_HASH), - constructL2Log(true, L2_TO_L1_MESSENGER, SYSTEM_LOG_KEYS.STATE_DIFF_HASH_KEY, ethers.constants.HashZero), constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, @@ -226,27 +241,19 @@ export function createSystemLogs( SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY, numberOfLayer1Txs ? numberOfLayer1Txs.toString() : ethers.constants.HashZero ), - constructL2Log(true, PUBDATA_CHUNK_PUBLISHER_ADDRESS, SYSTEM_LOG_KEYS.BLOB_ONE_HASH_KEY, ethers.constants.HashZero), - constructL2Log(true, PUBDATA_CHUNK_PUBLISHER_ADDRESS, SYSTEM_LOG_KEYS.BLOB_TWO_HASH_KEY, ethers.constants.HashZero), - constructL2Log( - true, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, - SYSTEM_LOG_KEYS.BLOB_THREE_HASH_KEY, - ethers.constants.HashZero - ), + constructL2Log( true, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, - SYSTEM_LOG_KEYS.BLOB_FOUR_HASH_KEY, - ethers.constants.HashZero + L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS.L2_DA_VALIDATOR_OUTPUT_HASH_KEY, + l2DaValidatorOutputHash ? 
ethers.utils.hexlify(l2DaValidatorOutputHash) : ethers.constants.HashZero ), constructL2Log( true, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, - SYSTEM_LOG_KEYS.BLOB_FIVE_HASH_KEY, - ethers.constants.HashZero + L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS.USED_L2_DA_VALIDATOR_ADDRESS_KEY, + process.env.CONTRACTS_L2_DA_VALIDATOR_ADDR ), - constructL2Log(true, PUBDATA_CHUNK_PUBLISHER_ADDRESS, SYSTEM_LOG_KEYS.BLOB_SIX_HASH_KEY, ethers.constants.HashZero), ]; } @@ -254,12 +261,11 @@ export function createSystemLogsWithUpgrade( chainedPriorityTxHashKey?: BytesLike, numberOfLayer1Txs?: BigNumberish, upgradeTxHash?: string, - previousBatchHash?: string + previousBatchHash?: string, + l2DaValidatorOutputHash?: BytesLike ) { return [ constructL2Log(true, L2_TO_L1_MESSENGER, SYSTEM_LOG_KEYS.L2_TO_L1_LOGS_TREE_ROOT_KEY, ethers.constants.HashZero), - constructL2Log(true, L2_TO_L1_MESSENGER, SYSTEM_LOG_KEYS.TOTAL_L2_TO_L1_PUBDATA_KEY, PUBDATA_HASH), - constructL2Log(true, L2_TO_L1_MESSENGER, SYSTEM_LOG_KEYS.STATE_DIFF_HASH_KEY, ethers.constants.HashZero), constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, @@ -284,27 +290,18 @@ export function createSystemLogsWithUpgrade( SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY, numberOfLayer1Txs ? 
numberOfLayer1Txs.toString() : ethers.constants.HashZero ), - constructL2Log(true, PUBDATA_CHUNK_PUBLISHER_ADDRESS, SYSTEM_LOG_KEYS.BLOB_ONE_HASH_KEY, ethers.constants.HashZero), - constructL2Log(true, PUBDATA_CHUNK_PUBLISHER_ADDRESS, SYSTEM_LOG_KEYS.BLOB_TWO_HASH_KEY, ethers.constants.HashZero), constructL2Log( true, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, - SYSTEM_LOG_KEYS.BLOB_THREE_HASH_KEY, - ethers.constants.HashZero + L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS.L2_DA_VALIDATOR_OUTPUT_HASH_KEY, + ethers.utils.hexlify(l2DaValidatorOutputHash) || ethers.constants.HashZero ), constructL2Log( true, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, - SYSTEM_LOG_KEYS.BLOB_FOUR_HASH_KEY, - ethers.constants.HashZero + L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS.USED_L2_DA_VALIDATOR_ADDRESS_KEY, + process.env.CONTRACTS_L2_DA_VALIDATOR_ADDR || ethers.constants.AddressZero ), - constructL2Log( - true, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, - SYSTEM_LOG_KEYS.BLOB_FIVE_HASH_KEY, - ethers.constants.HashZero - ), - constructL2Log(true, PUBDATA_CHUNK_PUBLISHER_ADDRESS, SYSTEM_LOG_KEYS.BLOB_SIX_HASH_KEY, ethers.constants.HashZero), constructL2Log( true, L2_BOOTLOADER_ADDRESS, @@ -369,13 +366,20 @@ export interface CommitBatchInfo { bootloaderHeapInitialContentsHash: BytesLike; eventsQueueStateHash: BytesLike; systemLogs: BytesLike; - pubdataCommitments: BytesLike; + operatorDAInput: BytesLike; +} + +export interface PriorityOpsBatchInfo { + leftPath: Array; + rightPath: Array; + itemHashes: Array; } export async function depositERC20( bridge: IL1ERC20Bridge, bridgehubContract: IBridgehub, chainId: string, + l1ChainId: number, l2Receiver: string, l1Token: string, amount: ethers.BigNumber, @@ -383,7 +387,7 @@ export async function depositERC20( l2RefundRecipient = ethers.constants.AddressZero ) { const gasPrice = await bridge.provider.getGasPrice(); - const gasPerPubdata = REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT; + const gasPerPubdata = REQUIRED_L2_GAS_PRICE_PER_PUBDATA; const neededValue = await 
bridgehubContract.l2TransactionBaseCost(chainId, gasPrice, l2GasLimit, gasPerPubdata); const ethIsBaseToken = (await bridgehubContract.baseToken(chainId)) == ADDRESS_ONE; @@ -392,7 +396,7 @@ export async function depositERC20( l1Token, amount, l2GasLimit, - REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT, + REQUIRED_L2_GAS_PRICE_PER_PUBDATA, l2RefundRecipient, { value: ethIsBaseToken ? neededValue : 0, @@ -407,7 +411,7 @@ export function buildL2CanonicalTransaction(tx: Partial) from: ethers.constants.AddressZero, to: ethers.constants.AddressZero, gasLimit: 5000000, - gasPerPubdataByteLimit: REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT, + gasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, maxFeePerGas: 0, maxPriorityFeePerGas: 0, paymaster: 0, @@ -427,17 +431,60 @@ export type CommitBatchInfoWithTimestamp = Partial & { batchNumber: BigNumberish; }; +function padStringWithZeroes(str: string, lenBytes: number): string { + const strLen = lenBytes * 2; + if (str.length > strLen) { + throw new Error("String is too long"); + } + const paddingLength = strLen - str.length; + return str + "0".repeat(paddingLength); +} + +// Returns a pair of strings: +// - the expected pubdata commitemnt +// - the required rollup l2 da hash output +export function buildL2DARollupPubdataCommitment(stateDiffHash: string, fullPubdata: string): [string, string] { + const BLOB_SIZE_BYTES = 126_976; + const fullPubdataHash = ethers.utils.keccak256(fullPubdata); + if (ethers.utils.arrayify(fullPubdata).length > BLOB_SIZE_BYTES) { + throw new Error("Too much pubdata"); + } + const blobsProvided = 1; + + const blobLinearHash = keccak256(padStringWithZeroes(fullPubdata, BLOB_SIZE_BYTES)); + + const l1DAOutput = ethers.utils.hexConcat([ + stateDiffHash, + fullPubdataHash, + ethers.utils.hexlify(blobsProvided), + blobLinearHash, + ]); + const l1DAOutputHash = ethers.utils.keccak256(l1DAOutput); + + // After the header the 00 byte is for "calldata" mode. + // Then, there is the full pubdata. 
+ // Then, there are 32 bytes for blob commitment. They must have at least one non-zero byte, + // so it will be the last one. + const fullPubdataCommitment = `${l1DAOutput}00${fullPubdata.slice(2)}${"0".repeat(62)}01`; + + return [fullPubdataCommitment, l1DAOutputHash]; +} + export async function buildCommitBatchInfoWithUpgrade( prevInfo: StoredBatchInfo, info: CommitBatchInfoWithTimestamp, upgradeTxHash: string ): Promise { const timestamp = info.timestamp || (await hardhat.ethers.provider.getBlock("latest")).timestamp; + + const [fullPubdataCommitment, l1DAOutputHash] = buildL2DARollupPubdataCommitment(ethers.constants.HashZero, "0x"); + const systemLogs = createSystemLogsWithUpgrade( info.priorityOperationsHash, info.numberOfLayer1Txs, upgradeTxHash, - ethers.utils.hexlify(prevInfo.batchHash) + ethers.utils.hexlify(prevInfo.batchHash), + l1DAOutputHash ); systemLogs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, @@ -453,7 +500,7 @@ export async function buildCommitBatchInfoWithUpgrade( numberOfLayer1Txs: 0, priorityOperationsHash: EMPTY_STRING_KECCAK, systemLogs: ethers.utils.hexConcat(systemLogs), - pubdataCommitments: `0x${"0".repeat(130)}`, + operatorDAInput: fullPubdataCommitment, bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), eventsQueueStateHash: ethers.utils.randomBytes(32), ...info, @@ -469,13 +516,13 @@ export async function makeExecutedEqualCommitted( batchesToExecute = [...batchesToProve, ...batchesToExecute]; await ( - await proxyExecutor.proveBatches(prevBatchInfo, batchesToProve, { - recursiveAggregationInput: [], - serializedProof: [], - }) + await proxyExecutor.proveBatchesSharedBridge(0, ...encodeProveBatchesData(prevBatchInfo, batchesToProve, [])) ).wait(); - await (await proxyExecutor.executeBatches(batchesToExecute)).wait(); + const dummyMerkleProofs = batchesToExecute.map(() => ({ leftPath: [], rightPath: [], itemHashes: [] })); + await ( + await proxyExecutor.executeBatchesSharedBridge(0, 
...encodeExecuteBatchesData(batchesToExecute, dummyMerkleProofs)) + ).wait(); } export function getBatchStoredInfo(commitInfo: CommitBatchInfo, commitment: string): StoredBatchInfo { @@ -490,3 +537,40 @@ export function getBatchStoredInfo(commitInfo: CommitBatchInfo, commitment: stri commitment: commitment, }; } + +export function encodeCommitBatchesData( + storedBatchInfo: StoredBatchInfo, + commitBatchInfos: Array +): [BigNumberish, BigNumberish, string] { + const encodedCommitDataWithoutVersion = defaultAbiCoder.encode( + [STORED_BATCH_INFO_ABI_STRING, `${COMMIT_BATCH_INFO_ABI_STRING}[]`], + [storedBatchInfo, commitBatchInfos] + ); + const commitData = hexConcat(["0x00", encodedCommitDataWithoutVersion]); + return [commitBatchInfos[0].batchNumber, commitBatchInfos[commitBatchInfos.length - 1].batchNumber, commitData]; +} + +export function encodeProveBatchesData( + prevBatch: StoredBatchInfo, + committedBatches: Array, + proof: Array +): [BigNumberish, BigNumberish, string] { + const encodedProveDataWithoutVersion = defaultAbiCoder.encode( + [STORED_BATCH_INFO_ABI_STRING, `${STORED_BATCH_INFO_ABI_STRING}[]`, "uint256[]"], + [prevBatch, committedBatches, proof] + ); + const proveData = hexConcat(["0x00", encodedProveDataWithoutVersion]); + return [committedBatches[0].batchNumber, committedBatches[committedBatches.length - 1].batchNumber, proveData]; +} + +export function encodeExecuteBatchesData( + batchesData: Array, + priorityOpsBatchInfo: Array +): [BigNumberish, BigNumberish, string] { + const encodedExecuteDataWithoutVersion = defaultAbiCoder.encode( + [`${STORED_BATCH_INFO_ABI_STRING}[]`, `${PRIORITY_OPS_BATCH_INFO_ABI_STRING}[]`], + [batchesData, priorityOpsBatchInfo] + ); + const executeData = hexConcat(["0x00", encodedExecuteDataWithoutVersion]); + return [batchesData[0].batchNumber, batchesData[batchesData.length - 1].batchNumber, executeData]; +} diff --git a/l1-contracts/test/unit_tests/validator_timelock_test.spec.ts 
b/l1-contracts/test/unit_tests/validator_timelock_test.spec.ts deleted file mode 100644 index 119cae7cc..000000000 --- a/l1-contracts/test/unit_tests/validator_timelock_test.spec.ts +++ /dev/null @@ -1,280 +0,0 @@ -import { expect } from "chai"; -import { ethers } from "ethers"; -import * as hardhat from "hardhat"; -import type { DummyExecutor, ValidatorTimelock, DummyStateTransitionManager } from "../../typechain"; -import { DummyExecutorFactory, ValidatorTimelockFactory, DummyStateTransitionManagerFactory } from "../../typechain"; -import { getCallRevertReason } from "./utils"; - -describe("ValidatorTimelock tests", function () { - let owner: ethers.Signer; - let validator: ethers.Signer; - let randomSigner: ethers.Signer; - let validatorTimelock: ValidatorTimelock; - let dummyExecutor: DummyExecutor; - let dummyStateTransitionManager: DummyStateTransitionManager; - const chainId: number = 270; - - const MOCK_PROOF_INPUT = { - recursiveAggregationInput: [], - serializedProof: [], - }; - - function getMockCommitBatchInfo(batchNumber: number, timestamp: number = 0) { - return { - batchNumber, - timestamp, - indexRepeatedStorageChanges: 0, - newStateRoot: ethers.constants.HashZero, - numberOfLayer1Txs: 0, - priorityOperationsHash: ethers.constants.HashZero, - bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), - eventsQueueStateHash: ethers.utils.randomBytes(32), - systemLogs: [], - pubdataCommitments: - "0x00290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e56300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - }; - } - - function getMockStoredBatchInfo(batchNumber: number, timestamp: number = 0) { - return { - batchNumber, - batchHash: ethers.constants.HashZero, - indexRepeatedStorageChanges: 0, - numberOfLayer1Txs: 0, - priorityOperationsHash: ethers.constants.HashZero, - l2LogsTreeRoot: ethers.constants.HashZero, - timestamp, - commitment: 
ethers.constants.HashZero, - }; - } - - before(async () => { - [owner, validator, randomSigner] = await hardhat.ethers.getSigners(); - - const dummyExecutorFactory = await hardhat.ethers.getContractFactory("DummyExecutor"); - const dummyExecutorContract = await dummyExecutorFactory.deploy(); - dummyExecutor = DummyExecutorFactory.connect(dummyExecutorContract.address, dummyExecutorContract.signer); - - const dummyStateTransitionManagerFactory = await hardhat.ethers.getContractFactory("DummyStateTransitionManager"); - const dummyStateTransitionManagerContract = await dummyStateTransitionManagerFactory.deploy(); - dummyStateTransitionManager = DummyStateTransitionManagerFactory.connect( - dummyStateTransitionManagerContract.address, - dummyStateTransitionManagerContract.signer - ); - - const setSTtx = await dummyStateTransitionManager.setHyperchain(chainId, dummyExecutor.address); - await setSTtx.wait(); - - const validatorTimelockFactory = await hardhat.ethers.getContractFactory("ValidatorTimelock"); - const validatorTimelockContract = await validatorTimelockFactory.deploy(await owner.getAddress(), 0, chainId); - validatorTimelock = ValidatorTimelockFactory.connect( - validatorTimelockContract.address, - validatorTimelockContract.signer - ); - const setSTMtx = await validatorTimelock.setStateTransitionManager(dummyStateTransitionManager.address); - await setSTMtx.wait(); - }); - - it("Should check deployment", async () => { - expect(await validatorTimelock.owner()).equal(await owner.getAddress()); - expect(await validatorTimelock.executionDelay()).equal(0); - expect(await validatorTimelock.validators(chainId, ethers.constants.AddressZero)).equal(false); - expect(await validatorTimelock.stateTransitionManager()).equal(dummyStateTransitionManager.address); - expect(await dummyStateTransitionManager.getHyperchain(chainId)).equal(dummyExecutor.address); - expect(await dummyStateTransitionManager.getChainAdmin(chainId)).equal(await owner.getAddress()); - expect(await 
dummyExecutor.getAdmin()).equal(await owner.getAddress()); - }); - - it("Should revert if non-validator commits batches", async () => { - const revertReason = await getCallRevertReason( - validatorTimelock.connect(randomSigner).commitBatches(getMockStoredBatchInfo(0), [getMockCommitBatchInfo(1)]) - ); - - expect(revertReason).equal("ValidatorTimelock: only validator"); - }); - - it("Should revert if non-validator proves batches", async () => { - const revertReason = await getCallRevertReason( - validatorTimelock - .connect(randomSigner) - .proveBatches(getMockStoredBatchInfo(0), [getMockStoredBatchInfo(1)], MOCK_PROOF_INPUT) - ); - - expect(revertReason).equal("ValidatorTimelock: only validator"); - }); - - it("Should revert if non-validator revert batches", async () => { - const revertReason = await getCallRevertReason(validatorTimelock.connect(randomSigner).revertBatches(1)); - - expect(revertReason).equal("ValidatorTimelock: only validator"); - }); - - it("Should revert if non-validator executes batches", async () => { - const revertReason = await getCallRevertReason( - validatorTimelock.connect(randomSigner).executeBatches([getMockStoredBatchInfo(1)]) - ); - - expect(revertReason).equal("ValidatorTimelock: only validator"); - }); - - it("Should revert if not chain governor sets validator", async () => { - const revertReason = await getCallRevertReason( - validatorTimelock.connect(randomSigner).addValidator(chainId, await randomSigner.getAddress()) - ); - - expect(revertReason).equal("ValidatorTimelock: only chain admin"); - }); - - it("Should revert if non-owner sets execution delay", async () => { - const revertReason = await getCallRevertReason(validatorTimelock.connect(randomSigner).setExecutionDelay(1000)); - - expect(revertReason).equal("Ownable: caller is not the owner"); - }); - - it("Should successfully set the validator", async () => { - const validatorAddress = await validator.getAddress(); - await 
validatorTimelock.connect(owner).addValidator(chainId, validatorAddress); - - expect(await validatorTimelock.validators(chainId, validatorAddress)).equal(true); - }); - - it("Should successfully set the execution delay", async () => { - await validatorTimelock.connect(owner).setExecutionDelay(10); // set to 10 seconds - - expect(await validatorTimelock.executionDelay()).equal(10); - }); - - it("Should successfully commit batches", async () => { - await validatorTimelock - .connect(validator) - .commitBatchesSharedBridge(chainId, getMockStoredBatchInfo(0), [getMockCommitBatchInfo(1)]); - - expect(await dummyExecutor.getTotalBatchesCommitted()).equal(1); - }); - - it("Should successfully prove batches", async () => { - await validatorTimelock - .connect(validator) - .proveBatchesSharedBridge(chainId, getMockStoredBatchInfo(0), [getMockStoredBatchInfo(1, 1)], MOCK_PROOF_INPUT); - - expect(await dummyExecutor.getTotalBatchesVerified()).equal(1); - }); - - it("Should revert on executing earlier than the delay", async () => { - const revertReason = await getCallRevertReason( - validatorTimelock.connect(validator).executeBatchesSharedBridge(chainId, [getMockStoredBatchInfo(1)]) - ); - - expect(revertReason).equal("5c"); - }); - - it("Should successfully revert batches", async () => { - await validatorTimelock.connect(validator).revertBatchesSharedBridge(chainId, 0); - - expect(await dummyExecutor.getTotalBatchesVerified()).equal(0); - expect(await dummyExecutor.getTotalBatchesCommitted()).equal(0); - }); - - it("Should successfully overwrite the committing timestamp on the reverted batches timestamp", async () => { - const revertedBatchesTimestamp = Number(await validatorTimelock.getCommittedBatchTimestamp(chainId, 1)); - - await validatorTimelock - .connect(validator) - .commitBatchesSharedBridge(chainId, getMockStoredBatchInfo(0), [getMockCommitBatchInfo(1)]); - - await validatorTimelock - .connect(validator) - .proveBatchesSharedBridge(chainId, 
getMockStoredBatchInfo(0), [getMockStoredBatchInfo(1)], MOCK_PROOF_INPUT); - - const newBatchesTimestamp = Number(await validatorTimelock.getCommittedBatchTimestamp(chainId, 1)); - - expect(newBatchesTimestamp).greaterThanOrEqual(revertedBatchesTimestamp); - }); - - it("Should successfully execute batches after the delay", async () => { - await hardhat.network.provider.send("hardhat_mine", ["0x2", "0xc"]); //mine 2 batches with intervals of 12 seconds - await validatorTimelock.connect(validator).executeBatchesSharedBridge(chainId, [getMockStoredBatchInfo(1)]); - expect(await dummyExecutor.getTotalBatchesExecuted()).equal(1); - }); - - it("Should revert if validator tries to commit batches with invalid last committed batchNumber", async () => { - const revertReason = await getCallRevertReason( - validatorTimelock - .connect(validator) - .commitBatchesSharedBridge(chainId, getMockStoredBatchInfo(0), [getMockCommitBatchInfo(2)]) - ); - - // Error should be forwarded from the DummyExecutor - expect(revertReason).equal("DummyExecutor: Invalid last committed batch number"); - }); - - // Test case to check if proving batches with invalid batchNumber fails - it("Should revert if validator tries to prove batches with invalid batchNumber", async () => { - const revertReason = await getCallRevertReason( - validatorTimelock - .connect(validator) - .proveBatchesSharedBridge(chainId, getMockStoredBatchInfo(0), [getMockStoredBatchInfo(2, 1)], MOCK_PROOF_INPUT) - ); - - expect(revertReason).equal("DummyExecutor: Invalid previous batch number"); - }); - - it("Should revert if validator tries to execute more batches than were proven", async () => { - await hardhat.network.provider.send("hardhat_mine", ["0x2", "0xc"]); //mine 2 batches with intervals of 12 seconds - const revertReason = await getCallRevertReason( - validatorTimelock.connect(validator).executeBatchesSharedBridge(chainId, [getMockStoredBatchInfo(2)]) - ); - - expect(revertReason).equal("DummyExecutor 2: Can"); - }); - 
- // These tests primarily needed to make gas statistics be more accurate. - - it("Should commit multiple batches in one transaction", async () => { - await validatorTimelock - .connect(validator) - .commitBatchesSharedBridge(chainId, getMockStoredBatchInfo(1), [ - getMockCommitBatchInfo(2), - getMockCommitBatchInfo(3), - getMockCommitBatchInfo(4), - getMockCommitBatchInfo(5), - getMockCommitBatchInfo(6), - getMockCommitBatchInfo(7), - getMockCommitBatchInfo(8), - ]); - - expect(await dummyExecutor.getTotalBatchesCommitted()).equal(8); - }); - - it("Should prove multiple batches in one transactions", async () => { - for (let i = 1; i < 8; i++) { - await validatorTimelock - .connect(validator) - .proveBatchesSharedBridge( - chainId, - getMockStoredBatchInfo(i), - [getMockStoredBatchInfo(i + 1)], - MOCK_PROOF_INPUT - ); - - expect(await dummyExecutor.getTotalBatchesVerified()).equal(i + 1); - } - }); - - it("Should execute multiple batches in multiple transactions", async () => { - await hardhat.network.provider.send("hardhat_mine", ["0x2", "0xc"]); //mine 2 batches with intervals of 12 seconds - await validatorTimelock - .connect(validator) - .executeBatchesSharedBridge(chainId, [ - getMockStoredBatchInfo(2), - getMockStoredBatchInfo(3), - getMockStoredBatchInfo(4), - getMockStoredBatchInfo(5), - getMockStoredBatchInfo(6), - getMockStoredBatchInfo(7), - getMockStoredBatchInfo(8), - ]); - - expect(await dummyExecutor.getTotalBatchesExecuted()).equal(8); - }); -}); diff --git a/l2-contracts/.gitignore b/l2-contracts/.gitignore new file mode 100644 index 000000000..16d545bb0 --- /dev/null +++ b/l2-contracts/.gitignore @@ -0,0 +1,15 @@ +# Compiler files +cache/ +out/ +zkout/ + +# Ignores development broadcast logs +!/broadcast +/broadcast/*/31337/ +/broadcast/**/dry-run/ + +# Docs +docs/ + +# Dotenv file +.env diff --git a/l2-contracts/contracts/ConsensusRegistry.sol b/l2-contracts/contracts/ConsensusRegistry.sol index 514a4f205..de5af6340 100644 --- 
a/l2-contracts/contracts/ConsensusRegistry.sol +++ b/l2-contracts/contracts/ConsensusRegistry.sol @@ -1,9 +1,9 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; -import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable/access/Ownable2StepUpgradeable.sol"; -import {Initializable} from "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; +import {Ownable2StepUpgradeable} from "@openzeppelin/contracts-upgradeable-v4/access/Ownable2StepUpgradeable.sol"; +import {Initializable} from "@openzeppelin/contracts-upgradeable-v4/proxy/utils/Initializable.sol"; import {IConsensusRegistry} from "./interfaces/IConsensusRegistry.sol"; /// @author Matter Labs diff --git a/l2-contracts/contracts/Dependencies.sol b/l2-contracts/contracts/Dependencies.sol index bb8adf1f5..8a606d45a 100644 --- a/l2-contracts/contracts/Dependencies.sol +++ b/l2-contracts/contracts/Dependencies.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; /* solhint-disable-next-line no-unused-import */ -import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; /* solhint-disable-next-line no-unused-import */ -import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol"; +import {ProxyAdmin} from "@openzeppelin/contracts-v4/proxy/transparent/ProxyAdmin.sol"; diff --git a/l2-contracts/contracts/ForceDeployUpgrader.sol b/l2-contracts/contracts/ForceDeployUpgrader.sol index 9abb22555..a7de60a2a 100644 --- a/l2-contracts/contracts/ForceDeployUpgrader.sol +++ b/l2-contracts/contracts/ForceDeployUpgrader.sol @@ -1,12 +1,12 @@ // SPDX-License-Identifier: MIT OR Apache-2.0 -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IContractDeployer, DEPLOYER_SYSTEM_CONTRACT} from "./L2ContractHelper.sol"; /// @custom:security-contact security@matterlabs.dev /// @notice The contract that calls force deployment during the L2 system contract upgrade. -/// @notice It is supposed to be used as an implementation of the ComplexUpgrader. +/// @notice It is supposed to be used inherited by an implementation of the ComplexUpgrader. (but it is not useful in itself) contract ForceDeployUpgrader { /// @notice A function that performs force deploy /// @param _forceDeployments The force deployments to perform. diff --git a/l2-contracts/contracts/L2ContractHelper.sol b/l2-contracts/contracts/L2ContractHelper.sol index 79090a3a9..620e9b3ee 100644 --- a/l2-contracts/contracts/L2ContractHelper.sol +++ b/l2-contracts/contracts/L2ContractHelper.sol @@ -1,6 +1,9 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; -pragma solidity 0.8.20; +import {EfficientCall} from "@matterlabs/zksync-contracts/l2/system-contracts/libraries/EfficientCall.sol"; +import {MalformedBytecode, BytecodeError} from "./errors/L2ContractErrors.sol"; /** * @author Matter Labs @@ -51,6 +54,13 @@ interface IContractDeployer { /// @param _bytecodeHash the bytecodehash of the new contract to be deployed /// @param _input the calldata to be sent to the constructor of the new contract function create2(bytes32 _salt, bytes32 _bytecodeHash, bytes calldata _input) external returns (address); + + function getNewAddressCreate2( + address _sender, + bytes32 _bytecodeHash, + bytes32 _salt, + bytes calldata _input + ) external view returns (address newAddress); } /** @@ -65,16 +75,56 @@ interface IBaseToken { function withdrawWithMessage(address _l1Receiver, bytes memory _additionalData) external payable; } +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice The interface for the Compressor contract, responsible for verifying the correctness of + * the compression of the state diffs and bytecodes. + */ +interface ICompressor { + function verifyCompressedStateDiffs( + uint256 _numberOfStateDiffs, + uint256 _enumerationIndexSize, + bytes calldata _stateDiffs, + bytes calldata _compressedStateDiffs + ) external returns (bytes32 stateDiffHash); +} + +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice Interface for contract responsible chunking pubdata into the appropriate size for EIP-4844 blobs. + */ +interface IPubdataChunkPublisher { + /// @notice Chunks pubdata into pieces that can fit into blobs. + /// @param _pubdata The total l2 to l1 pubdata that will be sent via L1 blobs. + /// @dev Note: This is an early implementation, in the future we plan to support up to 16 blobs per l1 batch. 
+ function chunkPubdataToBlobs(bytes calldata _pubdata) external pure returns (bytes32[] memory blobLinearHashes); +} + uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15 +/// @dev The offset from which the built-in, but user space contracts are located. +uint160 constant USER_CONTRACTS_OFFSET = 0x10000; // 2^16 + address constant BOOTLOADER_ADDRESS = address(SYSTEM_CONTRACTS_OFFSET + 0x01); address constant MSG_VALUE_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x09); address constant DEPLOYER_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x06); +address constant L2_BRIDGEHUB_ADDRESS = address(USER_CONTRACTS_OFFSET + 0x02); + +uint256 constant L1_CHAIN_ID = 1; + IL2Messenger constant L2_MESSENGER = IL2Messenger(address(SYSTEM_CONTRACTS_OFFSET + 0x08)); IBaseToken constant L2_BASE_TOKEN_ADDRESS = IBaseToken(address(SYSTEM_CONTRACTS_OFFSET + 0x0a)); +ICompressor constant COMPRESSOR_CONTRACT = ICompressor(address(SYSTEM_CONTRACTS_OFFSET + 0x0e)); + +IPubdataChunkPublisher constant PUBDATA_CHUNK_PUBLISHER = IPubdataChunkPublisher( + address(SYSTEM_CONTRACTS_OFFSET + 0x11) +); + /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev @@ -112,9 +162,69 @@ library L2ContractHelper { return address(uint160(uint256(data))); } + + /// @notice Validate the bytecode format and calculate its hash. + /// @param _bytecode The bytecode to hash. + /// @return hashedBytecode The 32-byte hash of the bytecode. + /// Note: The function reverts the execution if the bytecode has non expected format: + /// - Bytecode bytes length is not a multiple of 32 + /// - Bytecode bytes length is not less than 2^21 bytes (2^16 words) + /// - Bytecode words length is not odd + function hashL2Bytecode(bytes calldata _bytecode) internal view returns (bytes32 hashedBytecode) { + // Note that the length of the bytecode must be provided in 32-byte words. 
+ if (_bytecode.length % 32 != 0) { + revert MalformedBytecode(BytecodeError.Length); + } + + uint256 bytecodeLenInWords = _bytecode.length / 32; + // bytecode length must be less than 2^16 words + if (bytecodeLenInWords >= 2 ** 16) { + revert MalformedBytecode(BytecodeError.NumberOfWords); + } + // bytecode length in words must be odd + if (bytecodeLenInWords % 2 == 0) { + revert MalformedBytecode(BytecodeError.WordsMustBeOdd); + } + hashedBytecode = + EfficientCall.sha(_bytecode) & + 0x00000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF; + // Setting the version of the hash + hashedBytecode = (hashedBytecode | bytes32(uint256(1 << 248))); + // Setting the length + hashedBytecode = hashedBytecode | bytes32(bytecodeLenInWords << 224); + } + + /// @notice Validate the bytecode format and calculate its hash. + /// @param _bytecode The bytecode to hash. + /// @return hashedBytecode The 32-byte hash of the bytecode. + /// Note: The function reverts the execution if the bytecode has non expected format: + /// - Bytecode bytes length is not a multiple of 32 + /// - Bytecode bytes length is not less than 2^21 bytes (2^16 words) + /// - Bytecode words length is not odd + function hashL2BytecodeMemory(bytes memory _bytecode) internal view returns (bytes32 hashedBytecode) { + // Note that the length of the bytecode must be provided in 32-byte words. 
+ if (_bytecode.length % 32 != 0) { + revert MalformedBytecode(BytecodeError.Length); + } + + uint256 bytecodeLenInWords = _bytecode.length / 32; + // bytecode length must be less than 2^16 words + if (bytecodeLenInWords >= 2 ** 16) { + revert MalformedBytecode(BytecodeError.NumberOfWords); + } + // bytecode length in words must be odd + if (bytecodeLenInWords % 2 == 0) { + revert MalformedBytecode(BytecodeError.WordsMustBeOdd); + } + hashedBytecode = sha256(_bytecode) & 0x00000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF; + // Setting the version of the hash + hashedBytecode = (hashedBytecode | bytes32(uint256(1 << 248))); + // Setting the length + hashedBytecode = hashedBytecode | bytes32(bytecodeLenInWords << 224); + } } -/// @notice Structure used to represent a zkSync transaction. +/// @notice Structure used to represent a ZKsync transaction. struct Transaction { // The type of the transaction. uint256 txType; diff --git a/l2-contracts/contracts/SystemContractsCaller.sol b/l2-contracts/contracts/SystemContractsCaller.sol index 36153eb6d..ba3136792 100644 --- a/l2-contracts/contracts/SystemContractsCaller.sol +++ b/l2-contracts/contracts/SystemContractsCaller.sol @@ -1,6 +1,8 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +// solhint-disable one-contract-per-file +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; import {MSG_VALUE_SYSTEM_CONTRACT} from "./L2ContractHelper.sol"; @@ -23,9 +25,14 @@ enum CalldataForwardingMode { UseAuxHeap } +/// @notice Error thrown a cast from uint256 to u32 is not possible. 
+error U32CastOverflow(); + library Utils { function safeCastToU32(uint256 _x) internal pure returns (uint32) { - require(_x <= type(uint32).max, "Overflow"); + if (_x > type(uint32).max) { + revert U32CastOverflow(); + } return uint32(_x); } @@ -41,7 +48,7 @@ library SystemContractsCaller { assembly { dataStart := add(data, 0x20) } - uint32 dataLength = uint32(Utils.safeCastToU32(data.length)); + uint32 dataLength = Utils.safeCastToU32(data.length); uint256 farCallAbi = getFarCallABI({ dataOffset: 0, diff --git a/l2-contracts/contracts/TestnetPaymaster.sol b/l2-contracts/contracts/TestnetPaymaster.sol index 41ce678c1..54558fe3b 100644 --- a/l2-contracts/contracts/TestnetPaymaster.sol +++ b/l2-contracts/contracts/TestnetPaymaster.sol @@ -1,12 +1,13 @@ // SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; -pragma solidity 0.8.20; - -import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import {IERC20} from "@openzeppelin/contracts-v4/token/ERC20/IERC20.sol"; import {IPaymaster, ExecutionResult, PAYMASTER_VALIDATION_SUCCESS_MAGIC} from "./interfaces/IPaymaster.sol"; import {IPaymasterFlow} from "./interfaces/IPaymasterFlow.sol"; import {Transaction, BOOTLOADER_ADDRESS} from "./L2ContractHelper.sol"; +import {Unauthorized, InvalidInput, InsufficientAllowance, FailedToTransferTokens, UnsupportedPaymasterFlow} from "./errors/L2ContractErrors.sol"; // This is a dummy paymaster. It expects the paymasterInput to contain its "signature" as well as the needed exchange rate. // It supports only approval-based paymaster flow. 
@@ -15,12 +16,17 @@ contract TestnetPaymaster is IPaymaster { bytes32, bytes32, Transaction calldata _transaction - ) external payable returns (bytes4 magic, bytes memory context) { + ) external payable returns (bytes4 magic, bytes memory) { // By default we consider the transaction as accepted. magic = PAYMASTER_VALIDATION_SUCCESS_MAGIC; - require(msg.sender == BOOTLOADER_ADDRESS, "Only bootloader can call this contract"); - require(_transaction.paymasterInput.length >= 4, "The standard paymaster input must be at least 4 bytes long"); + if (msg.sender != BOOTLOADER_ADDRESS) { + revert Unauthorized(msg.sender); + } + + if (_transaction.paymasterInput.length < 4) { + revert InvalidInput(); + } bytes4 paymasterInputSelector = bytes4(_transaction.paymasterInput[0:4]); if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) { @@ -33,7 +39,9 @@ contract TestnetPaymaster is IPaymaster { address thisAddress = address(this); uint256 providedAllowance = IERC20(token).allowance(userAddress, thisAddress); - require(providedAllowance >= amount, "The user did not provide enough allowance"); + if (providedAllowance < amount) { + revert InsufficientAllowance(providedAllowance, amount); + } // The testnet paymaster exchanges X wei of the token to the X wei of ETH. uint256 requiredETH = _transaction.gasLimit * _transaction.maxFeePerGas; @@ -51,7 +59,7 @@ contract TestnetPaymaster is IPaymaster { // If the revert reason is empty or represented by just a function selector, // we replace the error with a more user-friendly message if (revertReason.length <= 4) { - revert("Failed to transferFrom from users' account"); + revert FailedToTransferTokens(token, thisAddress, amount); } else { assembly { revert(add(0x20, revertReason), mload(revertReason)) @@ -61,9 +69,11 @@ contract TestnetPaymaster is IPaymaster { // The bootloader never returns any data, so it can safely be ignored here. 
(bool success, ) = payable(BOOTLOADER_ADDRESS).call{value: requiredETH}(""); - require(success, "Failed to transfer funds to the bootloader"); + if (!success) { + revert FailedToTransferTokens(address(0), BOOTLOADER_ADDRESS, requiredETH); + } } else { - revert("Unsupported paymaster flow"); + revert UnsupportedPaymasterFlow(); } } diff --git a/l2-contracts/contracts/bridge/interfaces/IL1ERC20Bridge.sol b/l2-contracts/contracts/bridge/interfaces/IL1ERC20Bridge.sol deleted file mode 100644 index 407669613..000000000 --- a/l2-contracts/contracts/bridge/interfaces/IL1ERC20Bridge.sol +++ /dev/null @@ -1,16 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; - -/// @author Matter Labs -// note we use the IL1ERC20Bridge only to send L1<>L2 messages, -// and we use this interface so that when the switch happened the old messages could be processed -interface IL1ERC20Bridge { - function finalizeWithdrawal( - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external; -} diff --git a/l2-contracts/contracts/bridge/interfaces/IL1SharedBridge.sol b/l2-contracts/contracts/bridge/interfaces/IL1SharedBridge.sol deleted file mode 100644 index 8ec1ff757..000000000 --- a/l2-contracts/contracts/bridge/interfaces/IL1SharedBridge.sol +++ /dev/null @@ -1,17 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; - -/// @title L1 Bridge contract interface -/// @author Matter Labs -/// @custom:security-contact security@matterlabs.dev -interface IL1SharedBridge { - function finalizeWithdrawal( - uint256 _chainId, - uint256 _l2BatchNumber, - uint256 _l2MessageIndex, - uint16 _l2TxNumberInBatch, - bytes calldata _message, - bytes32[] calldata _merkleProof - ) external; -} diff --git a/l2-contracts/contracts/bridge/interfaces/IL2StandardToken.sol b/l2-contracts/contracts/bridge/interfaces/IL2StandardToken.sol deleted file mode 100644 index 6ceb1ae80..000000000 --- 
a/l2-contracts/contracts/bridge/interfaces/IL2StandardToken.sol +++ /dev/null @@ -1,19 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; - -interface IL2StandardToken { - event BridgeInitialize(address indexed l1Token, string name, string symbol, uint8 decimals); - - event BridgeMint(address indexed _account, uint256 _amount); - - event BridgeBurn(address indexed _account, uint256 _amount); - - function bridgeMint(address _account, uint256 _amount) external; - - function bridgeBurn(address _account, uint256 _amount) external; - - function l1Address() external view returns (address); - - function l2Bridge() external view returns (address); -} diff --git a/l2-contracts/contracts/data-availability/DAErrors.sol b/l2-contracts/contracts/data-availability/DAErrors.sol new file mode 100644 index 000000000..c3f032d2a --- /dev/null +++ b/l2-contracts/contracts/data-availability/DAErrors.sol @@ -0,0 +1,12 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +enum PubdataField { + MsgHash, + Bytecode, + StateDiffCompressionVersion, + ExtraData +} + +error ReconstructionMismatch(PubdataField, bytes32 expected, bytes32 actual); diff --git a/l2-contracts/contracts/data-availability/RollupL2DAValidator.sol b/l2-contracts/contracts/data-availability/RollupL2DAValidator.sol new file mode 100644 index 000000000..febedf625 --- /dev/null +++ b/l2-contracts/contracts/data-availability/RollupL2DAValidator.sol @@ -0,0 +1,56 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IL2DAValidator} from "../interfaces/IL2DAValidator.sol"; +import {StateDiffL2DAValidator} from "./StateDiffL2DAValidator.sol"; +import {PUBDATA_CHUNK_PUBLISHER} from "../L2ContractHelper.sol"; + +import {SafeCast} from "@openzeppelin/contracts-v4/utils/math/SafeCast.sol"; +import {EfficientCall} from "@matterlabs/zksync-contracts/l2/system-contracts/libraries/EfficientCall.sol"; + +import {ReconstructionMismatch, PubdataField} from "./DAErrors.sol"; + +/// Rollup DA 
validator. It will publish data that would allow to use either calldata or blobs. +contract RollupL2DAValidator is IL2DAValidator, StateDiffL2DAValidator { + function validatePubdata( + // The rolling hash of the user L2->L1 logs. + bytes32, + // The root hash of the user L2->L1 logs. + bytes32, + // The chained hash of the L2->L1 messages + bytes32 _chainedMessagesHash, + // The chained hash of uncompressed bytecodes sent to L1 + bytes32 _chainedBytecodesHash, + // Operator data, that is related to the DA itself + bytes calldata _totalL2ToL1PubdataAndStateDiffs + ) external returns (bytes32 outputHash) { + (bytes32 stateDiffHash, bytes calldata _totalPubdata, bytes calldata leftover) = _produceStateDiffPubdata( + _chainedMessagesHash, + _chainedBytecodesHash, + _totalL2ToL1PubdataAndStateDiffs + ); + + /// Check for calldata strict format + if (leftover.length != 0) { + revert ReconstructionMismatch(PubdataField.ExtraData, bytes32(0), bytes32(leftover.length)); + } + + // The preimage under the hash `outputHash` is expected to be in the following format: + // - First 32 bytes are the hash of the uncompressed state diff. + // - Then, there is a 32-byte hash of the full pubdata. + // - Then, there is the 1-byte number of blobs published. + // - Then, there are linear hashes of the published blobs, 32 bytes each. 
+ + bytes32[] memory blobLinearHashes = PUBDATA_CHUNK_PUBLISHER.chunkPubdataToBlobs(_totalPubdata); + + outputHash = keccak256( + abi.encodePacked( + stateDiffHash, + EfficientCall.keccak(_totalPubdata), + SafeCast.toUint8(blobLinearHashes.length), + blobLinearHashes + ) + ); + } +} diff --git a/l2-contracts/contracts/data-availability/StateDiffL2DAValidator.sol b/l2-contracts/contracts/data-availability/StateDiffL2DAValidator.sol new file mode 100644 index 000000000..ab7d48636 --- /dev/null +++ b/l2-contracts/contracts/data-availability/StateDiffL2DAValidator.sol @@ -0,0 +1,132 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {ReconstructionMismatch, PubdataField} from "./DAErrors.sol"; +import {COMPRESSOR_CONTRACT, L2ContractHelper} from "../L2ContractHelper.sol"; + +import {EfficientCall} from "@matterlabs/zksync-contracts/l2/system-contracts/libraries/EfficientCall.sol"; + +/// @dev The current version of state diff compression being used. +uint256 constant STATE_DIFF_COMPRESSION_VERSION_NUMBER = 1; + +uint256 constant L2_TO_L1_LOG_SERIALIZE_SIZE = 88; + +/// @dev Each state diff consists of 156 bytes of actual data and 116 bytes of unused padding, needed for circuit efficiency. +uint256 constant STATE_DIFF_ENTRY_SIZE = 272; + +/// A library that could be used by any L2 DA validator to produce standard state-diff-based +/// DA output. +abstract contract StateDiffL2DAValidator { + /// @notice Validates, that the operator provided the correct preimages for logs, messages, and bytecodes. + /// @return uncompressedStateDiffHash the hash of the uncompressed state diffs + /// @return totalL2Pubdata total pubdata that should be sent to L1. + /// @return leftoverSuffix the suffix left after pubdata and uncompressed state diffs. + /// On Era or other "vanilla" rollups it is empty, but it can be used for providing additional data by the operator, + /// e.g. DA committee signatures, etc. 
+ function _produceStateDiffPubdata( + bytes32 _chainedMessagesHash, + bytes32 _chainedBytecodesHash, + bytes calldata _totalL2ToL1PubdataAndStateDiffs + ) + internal + virtual + returns (bytes32 uncompressedStateDiffHash, bytes calldata totalL2Pubdata, bytes calldata leftoverSuffix) + { + uint256 calldataPtr = 0; + + /// Check logs + uint32 numberOfL2ToL1Logs = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); + calldataPtr += 4 + numberOfL2ToL1Logs * L2_TO_L1_LOG_SERIALIZE_SIZE; + + /// Check messages + uint32 numberOfMessages = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); + calldataPtr += 4; + bytes32 reconstructedChainedMessagesHash; + for (uint256 i = 0; i < numberOfMessages; ++i) { + uint32 currentMessageLength = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); + calldataPtr += 4; + bytes32 hashedMessage = EfficientCall.keccak( + _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + currentMessageLength] + ); + calldataPtr += currentMessageLength; + reconstructedChainedMessagesHash = keccak256(abi.encode(reconstructedChainedMessagesHash, hashedMessage)); + } + if (reconstructedChainedMessagesHash != _chainedMessagesHash) { + revert ReconstructionMismatch(PubdataField.MsgHash, _chainedMessagesHash, reconstructedChainedMessagesHash); + } + + /// Check bytecodes + uint32 numberOfBytecodes = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); + calldataPtr += 4; + bytes32 reconstructedChainedL1BytecodesRevealDataHash; + for (uint256 i = 0; i < numberOfBytecodes; ++i) { + uint32 currentBytecodeLength = uint32( + bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]) + ); + calldataPtr += 4; + reconstructedChainedL1BytecodesRevealDataHash = keccak256( + abi.encode( + reconstructedChainedL1BytecodesRevealDataHash, + L2ContractHelper.hashL2Bytecode( + _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 
currentBytecodeLength] + ) + ) + ); + calldataPtr += currentBytecodeLength; + } + if (reconstructedChainedL1BytecodesRevealDataHash != _chainedBytecodesHash) { + revert ReconstructionMismatch( + PubdataField.Bytecode, + _chainedBytecodesHash, + reconstructedChainedL1BytecodesRevealDataHash + ); + } + + /// Check State Diffs + /// encoding is as follows: + /// header (1 byte version, 3 bytes total len of compressed, 1 byte enumeration index size) + /// body (`compressedStateDiffSize` bytes, 4 bytes number of state diffs, `numberOfStateDiffs` * `STATE_DIFF_ENTRY_SIZE` bytes for the uncompressed state diffs) + /// encoded state diffs: [20bytes address][32bytes key][32bytes derived key][8bytes enum index][32bytes initial value][32bytes final value] + if ( + uint256(uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr]))) != + STATE_DIFF_COMPRESSION_VERSION_NUMBER + ) { + revert ReconstructionMismatch( + PubdataField.StateDiffCompressionVersion, + bytes32(STATE_DIFF_COMPRESSION_VERSION_NUMBER), + bytes32(uint256(uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr])))) + ); + } + ++calldataPtr; + + uint24 compressedStateDiffSize = uint24(bytes3(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 3])); + calldataPtr += 3; + + uint8 enumerationIndexSize = uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr])); + ++calldataPtr; + + bytes calldata compressedStateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + + compressedStateDiffSize]; + calldataPtr += compressedStateDiffSize; + + totalL2Pubdata = _totalL2ToL1PubdataAndStateDiffs[:calldataPtr]; + + uint32 numberOfStateDiffs = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); + calldataPtr += 4; + + bytes calldata stateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + + (numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE)]; + + uncompressedStateDiffHash = COMPRESSOR_CONTRACT.verifyCompressedStateDiffs( + numberOfStateDiffs, + 
enumerationIndexSize, + stateDiffs, + compressedStateDiffs + ); + + calldataPtr += numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; + + leftoverSuffix = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:]; + } +} diff --git a/l2-contracts/contracts/data-availability/ValidiumL2DAValidator.sol b/l2-contracts/contracts/data-availability/ValidiumL2DAValidator.sol new file mode 100644 index 000000000..5930131fc --- /dev/null +++ b/l2-contracts/contracts/data-availability/ValidiumL2DAValidator.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IL2DAValidator} from "../interfaces/IL2DAValidator.sol"; + +/// Rollup DA validator. It will publish data that would allow to use either calldata or blobs. +contract ValidiumL2DAValidator is IL2DAValidator { + function validatePubdata( + // The rolling hash of the user L2->L1 logs. + bytes32, + // The root hash of the user L2->L1 logs. + bytes32, + // The chained hash of the L2->L1 messages + bytes32, + // The chained hash of uncompressed bytecodes sent to L1 + bytes32, + // Operator data, that is related to the DA itself + bytes calldata + ) external returns (bytes32 outputHash) { + // Since we do not need to publish anything to L1, we can just return 0. + // Note, that Rollup validator sends the hash of uncompressed state diffs, since the + // correctness of the publish pubdata depends on it. However Validium doesn't sent anything, + // so we don't need to publish even that. 
+ outputHash = bytes32(0); + } +} diff --git a/l2-contracts/contracts/dev-contracts/DevL2SharedBridge.sol b/l2-contracts/contracts/dev-contracts/DevL2SharedBridge.sol deleted file mode 100644 index 12a6a187f..000000000 --- a/l2-contracts/contracts/dev-contracts/DevL2SharedBridge.sol +++ /dev/null @@ -1,33 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; - -import {L2SharedBridge} from "../bridge/L2SharedBridge.sol"; -import {L2StandardERC20} from "../bridge/L2StandardERC20.sol"; -import {UpgradeableBeacon} from "@openzeppelin/contracts/proxy/beacon/UpgradeableBeacon.sol"; - -/// @author Matter Labs -/// @notice The implementation of the shared bridge that allows setting legacy bridge. Must only be used in local testing environments. -contract DevL2SharedBridge is L2SharedBridge { - constructor(uint256 _eraChainId) L2SharedBridge(_eraChainId) {} - - function initializeDevBridge( - address _l1SharedBridge, - address _l1Bridge, - bytes32 _l2TokenProxyBytecodeHash, - address _aliasedOwner - ) external reinitializer(2) { - l1SharedBridge = _l1SharedBridge; - - address l2StandardToken = address(new L2StandardERC20{salt: bytes32(0)}()); - l2TokenBeacon = new UpgradeableBeacon{salt: bytes32(0)}(l2StandardToken); - l2TokenProxyBytecodeHash = _l2TokenProxyBytecodeHash; - l2TokenBeacon.transferOwnership(_aliasedOwner); - - // Unfortunately the `l1Bridge` is not an internal variable in the parent contract. - // To keep the changes to the production code minimal, we'll just manually set the variable here. 
- assembly { - sstore(4, _l1Bridge) - } - } -} diff --git a/l2-contracts/contracts/dev-contracts/Multicall3.sol b/l2-contracts/contracts/dev-contracts/Multicall3.sol index 9a6c69340..aaa8b8012 100644 --- a/l2-contracts/contracts/dev-contracts/Multicall3.sol +++ b/l2-contracts/contracts/dev-contracts/Multicall3.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; /// @title Multicall3 /// @notice Aggregate results from multiple function calls diff --git a/l2-contracts/contracts/dev-contracts/VerifierRecursiveTest.sol b/l2-contracts/contracts/dev-contracts/VerifierRecursiveTest.sol new file mode 100644 index 000000000..2b1da08f0 --- /dev/null +++ b/l2-contracts/contracts/dev-contracts/VerifierRecursiveTest.sol @@ -0,0 +1,70 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Verifier} from "../verifier/Verifier.sol"; + +/// @author Matter Labs +contract VerifierRecursiveTest is Verifier { + // add this to be excluded from coverage report + function test() internal virtual {} + + function _loadVerificationKey() internal pure override { + assembly { + // gate setup commitments + mstore(VK_GATE_SETUP_0_X_SLOT, 0x046e45fd137982bd0f6cf731b4650d2d520e8d675827744e1edf1308583599bb) + mstore(VK_GATE_SETUP_0_Y_SLOT, 0x177f14d16b716d4298be5e07b83add3fb61ff1ee08dce19f9a54fa8f04937f7e) + mstore(VK_GATE_SETUP_1_X_SLOT, 0x169ad5156d25b56f7b67ea6382f88b845ed5bae5b91aacfe51d8f0716afff2fb) + mstore(VK_GATE_SETUP_1_Y_SLOT, 0x2406e3268e4d5fa672142998ecf834034638a4a6f8b5e90205552c6aa1dde163) + mstore(VK_GATE_SETUP_2_X_SLOT, 0x05fd0ce0fdc590938d29c738c8dc956b32ca8e69c3babfbb49dc1c13a6d9a8d4) + mstore(VK_GATE_SETUP_2_Y_SLOT, 0x0a27dac323a04dd319d9805be879875c95063d0a55c96214cd45c913fba84460) + mstore(VK_GATE_SETUP_3_X_SLOT, 0x0d58a2a86b208a4976beb9bfd918514d448656e0ee66175eb344a4a17bba99f8) + mstore(VK_GATE_SETUP_3_Y_SLOT, 0x215fa609a1a425b84c9dc218c6cf999596d9eba6d35597ad7aaf2d077a6616ed) + 
mstore(VK_GATE_SETUP_4_X_SLOT, 0x1a26e6deccf91174ab13613363eb4939680828f0c6031f5039f9e6f264afa68c) + mstore(VK_GATE_SETUP_4_Y_SLOT, 0x1f5b2d6bffac1839edfd02cd0e41acc411f0ecbf6c5c4b1da0e12b68b99cb25d) + mstore(VK_GATE_SETUP_5_X_SLOT, 0x09b71be2e8a45dcbe7654cf369c4f1f2e7eab4b97869a469fb7a149d989f7226) + mstore(VK_GATE_SETUP_5_Y_SLOT, 0x197e1e2cefbd4f99558b89ca875e01fec0f14f05e5128bd869c87d6bf2f307fa) + mstore(VK_GATE_SETUP_6_X_SLOT, 0x0d7cef745da686fd44760403d452d72be504bb41b0a7f4fbe973a07558893871) + mstore(VK_GATE_SETUP_6_Y_SLOT, 0x1e9a863307cdfd3fdcf119f72279ddfda08b6f23c3672e8378dbb9d548734c29) + mstore(VK_GATE_SETUP_7_X_SLOT, 0x16af3f5d978446fdb37d84f5cf12e59f5c1088bde23f8260c0bb6792c5f78e99) + mstore(VK_GATE_SETUP_7_Y_SLOT, 0x167d3aeee50c0e53fd1e8a33941a806a34cfae5dc8b66578486e5d7207b5d546) + + // gate selectors commitments + mstore(VK_GATE_SELECTORS_0_X_SLOT, 0x1addc8e154c74bed403dc19558096ce22f1ceb2c656a2a5e85e56d2be6580ed1) + mstore(VK_GATE_SELECTORS_0_Y_SLOT, 0x1420d38f0ef206828efc36d0f5ad2b4d85fe768097f358fc671b7b3ec0239234) + mstore(VK_GATE_SELECTORS_1_X_SLOT, 0x2d5c06d0c8aa6a3520b8351f82341affcbb1a0bf27bceb9bab175e3e1d38cf47) + mstore(VK_GATE_SELECTORS_1_Y_SLOT, 0x0ff8d923a0374308147f6dd4fc513f6d0640f5df699f4836825ef460df3f8d6a) + + // permutation commitments + mstore(VK_PERMUTATION_0_X_SLOT, 0x1de8943a8f67d9f6fcbda10a1f37a82de9e9ffd0a0102ea5ce0ce6dd13b4031b) + mstore(VK_PERMUTATION_0_Y_SLOT, 0x1e04b0824853ab5d7c3412a217a1c5b88a2b4011be7e7e849485be8ed7332e41) + mstore(VK_PERMUTATION_1_X_SLOT, 0x2aa1817b9cc40b6cc7a7b3f832f3267580f9fb8e539666c00541e1a77e34a3da) + mstore(VK_PERMUTATION_1_Y_SLOT, 0x0edb3cde226205b01212fc1861303c49ef3ff66f060b5833dc9a3f661ef31dd9) + mstore(VK_PERMUTATION_2_X_SLOT, 0x13f5ae93c8eccc1455a0095302923442d4b0b3c8233d66ded99ffcf2ad641c27) + mstore(VK_PERMUTATION_2_Y_SLOT, 0x2dd42d42ccdea8b1901435ace12bc9e52c7dbbeb409d20c517ba942ed0cc7519) + mstore(VK_PERMUTATION_3_X_SLOT, 
0x1a15a70a016be11af71e46e9c8a8d31ece32a7e657ae90356dd9535e6566645f) + mstore(VK_PERMUTATION_3_Y_SLOT, 0x0381d23e115521c6fc233c5346f79a6777bfa8871b7ee623d990cdcb5d8c3ce1) + + // lookup tables commitments + mstore(VK_LOOKUP_TABLE_0_X_SLOT, 0x2c513ed74d9d57a5ec901e074032741036353a2c4513422e96e7b53b302d765b) + mstore(VK_LOOKUP_TABLE_0_Y_SLOT, 0x04dd964427e430f16004076d708c0cb21e225056cc1d57418cfbd3d472981468) + mstore(VK_LOOKUP_TABLE_1_X_SLOT, 0x1ea83e5e65c6f8068f4677e2911678cf329b28259642a32db1f14b8347828aac) + mstore(VK_LOOKUP_TABLE_1_Y_SLOT, 0x1d22bc884a2da4962a893ba8de13f57aaeb785ed52c5e686994839cab8f7475d) + mstore(VK_LOOKUP_TABLE_2_X_SLOT, 0x0b2e7212d0d9cff26d0bdf3d79b2cac029a25dfeb1cafdf49e2349d7db348d89) + mstore(VK_LOOKUP_TABLE_2_Y_SLOT, 0x1301f9b252419ea240eb67fda720ca0b16d92364027285f95e9b1349490fa283) + mstore(VK_LOOKUP_TABLE_3_X_SLOT, 0x02f7b99fdfa5b418548c2d777785820e02383cfc87e7085e280a375a358153bf) + mstore(VK_LOOKUP_TABLE_3_Y_SLOT, 0x09d004fe08dc4d19c382df36fad22ef676185663543703e6a4b40203e50fd8a6) + + // lookup selector commitment + mstore(VK_LOOKUP_SELECTOR_X_SLOT, 0x1641f5d312e6f62720b1e6cd1d1be5bc0e69d10d20a12dc97ff04e2107e10ccc) + mstore(VK_LOOKUP_SELECTOR_Y_SLOT, 0x277f435d376acc3261ef9d5748e6705086214daf46d04edc80fbd657f8d9e73d) + + // table type commitment + mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x1b5f1cfddd6713cf25d9e6850a1b3fe80d6ef7fe2c67248f25362d5f9b31893c) + mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x0945076de03a0d240067e5f02b8fc11eaa589df3343542576eb59fdb3ecb57e0) + + // flag for using recursive part + mstore(VK_RECURSIVE_FLAG_SLOT, 1) + } + } +} diff --git a/l2-contracts/contracts/dev-contracts/VerifierTest.sol b/l2-contracts/contracts/dev-contracts/VerifierTest.sol new file mode 100644 index 000000000..9c2db1c84 --- /dev/null +++ b/l2-contracts/contracts/dev-contracts/VerifierTest.sol @@ -0,0 +1,70 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Verifier} from "../verifier/Verifier.sol"; + +/// @author Matter Labs 
+contract VerifierTest is Verifier { + // add this to be excluded from coverage report + function test() internal virtual {} + + function _loadVerificationKey() internal pure override { + assembly { + // gate setup commitments + mstore(VK_GATE_SETUP_0_X_SLOT, 0x046e45fd137982bd0f6cf731b4650d2d520e8d675827744e1edf1308583599bb) + mstore(VK_GATE_SETUP_0_Y_SLOT, 0x177f14d16b716d4298be5e07b83add3fb61ff1ee08dce19f9a54fa8f04937f7e) + mstore(VK_GATE_SETUP_1_X_SLOT, 0x169ad5156d25b56f7b67ea6382f88b845ed5bae5b91aacfe51d8f0716afff2fb) + mstore(VK_GATE_SETUP_1_Y_SLOT, 0x2406e3268e4d5fa672142998ecf834034638a4a6f8b5e90205552c6aa1dde163) + mstore(VK_GATE_SETUP_2_X_SLOT, 0x05fd0ce0fdc590938d29c738c8dc956b32ca8e69c3babfbb49dc1c13a6d9a8d4) + mstore(VK_GATE_SETUP_2_Y_SLOT, 0x0a27dac323a04dd319d9805be879875c95063d0a55c96214cd45c913fba84460) + mstore(VK_GATE_SETUP_3_X_SLOT, 0x0d58a2a86b208a4976beb9bfd918514d448656e0ee66175eb344a4a17bba99f8) + mstore(VK_GATE_SETUP_3_Y_SLOT, 0x215fa609a1a425b84c9dc218c6cf999596d9eba6d35597ad7aaf2d077a6616ed) + mstore(VK_GATE_SETUP_4_X_SLOT, 0x1a26e6deccf91174ab13613363eb4939680828f0c6031f5039f9e6f264afa68c) + mstore(VK_GATE_SETUP_4_Y_SLOT, 0x1f5b2d6bffac1839edfd02cd0e41acc411f0ecbf6c5c4b1da0e12b68b99cb25d) + mstore(VK_GATE_SETUP_5_X_SLOT, 0x09b71be2e8a45dcbe7654cf369c4f1f2e7eab4b97869a469fb7a149d989f7226) + mstore(VK_GATE_SETUP_5_Y_SLOT, 0x197e1e2cefbd4f99558b89ca875e01fec0f14f05e5128bd869c87d6bf2f307fa) + mstore(VK_GATE_SETUP_6_X_SLOT, 0x0d7cef745da686fd44760403d452d72be504bb41b0a7f4fbe973a07558893871) + mstore(VK_GATE_SETUP_6_Y_SLOT, 0x1e9a863307cdfd3fdcf119f72279ddfda08b6f23c3672e8378dbb9d548734c29) + mstore(VK_GATE_SETUP_7_X_SLOT, 0x16af3f5d978446fdb37d84f5cf12e59f5c1088bde23f8260c0bb6792c5f78e99) + mstore(VK_GATE_SETUP_7_Y_SLOT, 0x167d3aeee50c0e53fd1e8a33941a806a34cfae5dc8b66578486e5d7207b5d546) + + // gate selectors commitments + mstore(VK_GATE_SELECTORS_0_X_SLOT, 0x1addc8e154c74bed403dc19558096ce22f1ceb2c656a2a5e85e56d2be6580ed1) + 
mstore(VK_GATE_SELECTORS_0_Y_SLOT, 0x1420d38f0ef206828efc36d0f5ad2b4d85fe768097f358fc671b7b3ec0239234) + mstore(VK_GATE_SELECTORS_1_X_SLOT, 0x2d5c06d0c8aa6a3520b8351f82341affcbb1a0bf27bceb9bab175e3e1d38cf47) + mstore(VK_GATE_SELECTORS_1_Y_SLOT, 0x0ff8d923a0374308147f6dd4fc513f6d0640f5df699f4836825ef460df3f8d6a) + + // permutation commitments + mstore(VK_PERMUTATION_0_X_SLOT, 0x1de8943a8f67d9f6fcbda10a1f37a82de9e9ffd0a0102ea5ce0ce6dd13b4031b) + mstore(VK_PERMUTATION_0_Y_SLOT, 0x1e04b0824853ab5d7c3412a217a1c5b88a2b4011be7e7e849485be8ed7332e41) + mstore(VK_PERMUTATION_1_X_SLOT, 0x2aa1817b9cc40b6cc7a7b3f832f3267580f9fb8e539666c00541e1a77e34a3da) + mstore(VK_PERMUTATION_1_Y_SLOT, 0x0edb3cde226205b01212fc1861303c49ef3ff66f060b5833dc9a3f661ef31dd9) + mstore(VK_PERMUTATION_2_X_SLOT, 0x13f5ae93c8eccc1455a0095302923442d4b0b3c8233d66ded99ffcf2ad641c27) + mstore(VK_PERMUTATION_2_Y_SLOT, 0x2dd42d42ccdea8b1901435ace12bc9e52c7dbbeb409d20c517ba942ed0cc7519) + mstore(VK_PERMUTATION_3_X_SLOT, 0x1a15a70a016be11af71e46e9c8a8d31ece32a7e657ae90356dd9535e6566645f) + mstore(VK_PERMUTATION_3_Y_SLOT, 0x0381d23e115521c6fc233c5346f79a6777bfa8871b7ee623d990cdcb5d8c3ce1) + + // lookup tables commitments + mstore(VK_LOOKUP_TABLE_0_X_SLOT, 0x2c513ed74d9d57a5ec901e074032741036353a2c4513422e96e7b53b302d765b) + mstore(VK_LOOKUP_TABLE_0_Y_SLOT, 0x04dd964427e430f16004076d708c0cb21e225056cc1d57418cfbd3d472981468) + mstore(VK_LOOKUP_TABLE_1_X_SLOT, 0x1ea83e5e65c6f8068f4677e2911678cf329b28259642a32db1f14b8347828aac) + mstore(VK_LOOKUP_TABLE_1_Y_SLOT, 0x1d22bc884a2da4962a893ba8de13f57aaeb785ed52c5e686994839cab8f7475d) + mstore(VK_LOOKUP_TABLE_2_X_SLOT, 0x0b2e7212d0d9cff26d0bdf3d79b2cac029a25dfeb1cafdf49e2349d7db348d89) + mstore(VK_LOOKUP_TABLE_2_Y_SLOT, 0x1301f9b252419ea240eb67fda720ca0b16d92364027285f95e9b1349490fa283) + mstore(VK_LOOKUP_TABLE_3_X_SLOT, 0x02f7b99fdfa5b418548c2d777785820e02383cfc87e7085e280a375a358153bf) + mstore(VK_LOOKUP_TABLE_3_Y_SLOT, 
0x09d004fe08dc4d19c382df36fad22ef676185663543703e6a4b40203e50fd8a6) + + // lookup selector commitment + mstore(VK_LOOKUP_SELECTOR_X_SLOT, 0x1641f5d312e6f62720b1e6cd1d1be5bc0e69d10d20a12dc97ff04e2107e10ccc) + mstore(VK_LOOKUP_SELECTOR_Y_SLOT, 0x277f435d376acc3261ef9d5748e6705086214daf46d04edc80fbd657f8d9e73d) + + // table type commitment + mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x1b5f1cfddd6713cf25d9e6850a1b3fe80d6ef7fe2c67248f25362d5f9b31893c) + mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x0945076de03a0d240067e5f02b8fc11eaa589df3343542576eb59fdb3ecb57e0) + + // flag for using recursive part + mstore(VK_RECURSIVE_FLAG_SLOT, 0) + } + } +} diff --git a/l2-contracts/contracts/errors/L2ContractErrors.sol b/l2-contracts/contracts/errors/L2ContractErrors.sol new file mode 100644 index 000000000..bb16f38c6 --- /dev/null +++ b/l2-contracts/contracts/errors/L2ContractErrors.sol @@ -0,0 +1,46 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; + +// 0x1f73225f +error AddressMismatch(address expected, address supplied); +error AssetIdMismatch(bytes32 expected, bytes32 supplied); +// 0x5e85ae73 +error AmountMustBeGreaterThanZero(); +// 0xb4f54111 +error DeployFailed(); +// 0x7138356f +error EmptyAddress(); +// 0x1c25715b +error EmptyBytes32(); +// 0x1bdfd505 +error FailedToTransferTokens(address tokenContract, address to, uint256 amount); +// 0x2a1b2dd8 +error InsufficientAllowance(uint256 providedAllowance, uint256 requiredAmount); +// 0xcbd9d2e0 +error InvalidCaller(address); +// 0xb4fa3fb3 +error InvalidInput(); +// 0x0ac76f01 +error NonSequentialVersion(); +// 0x8e4a23d6 +error Unauthorized(address); +// 0x6e128399 +error Unimplemented(); +// 0xa4dde386 +error UnimplementedMessage(string message); +// 0xff15b069 +error UnsupportedPaymasterFlow(); +// 0x750b219c +error WithdrawFailed(); +error MalformedBytecode(BytecodeError); + +enum BytecodeError { + Version, + NumberOfWords, + Length, + WordsMustBeOdd, + DictionaryLength +} +// 0xd92e233d +error ZeroAddress(); diff --git a/l2-contracts/contracts/interfaces/IConsensusRegistry.sol b/l2-contracts/contracts/interfaces/IConsensusRegistry.sol index e3ddd118a..a5e017484 100644 --- a/l2-contracts/contracts/interfaces/IConsensusRegistry.sol +++ b/l2-contracts/contracts/interfaces/IConsensusRegistry.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; /// @author Matter Labs /// @custom:security-contact security@matterlabs.dev diff --git a/l2-contracts/contracts/interfaces/IL2DAValidator.sol b/l2-contracts/contracts/interfaces/IL2DAValidator.sol new file mode 100644 index 000000000..1e053307d --- /dev/null +++ b/l2-contracts/contracts/interfaces/IL2DAValidator.sol @@ -0,0 +1,18 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +interface IL2DAValidator { + function validatePubdata( + // The rolling hash of the user L2->L1 logs. + bytes32 _chainedLogsHash, + // The root hash of the user L2->L1 logs. + bytes32 _logsRootHash, + // The chained hash of the L2->L1 messages + bytes32 _chainedMessagesHash, + // The chained hash of uncompressed bytecodes sent to L1 + bytes32 _chainedBytecodesHash, + // Same operator input + bytes calldata _totalL2ToL1PubdataAndStateDiffs + ) external returns (bytes32 outputHash); +} diff --git a/l2-contracts/contracts/interfaces/IPaymaster.sol b/l2-contracts/contracts/interfaces/IPaymaster.sol index 8ba4bc694..ed7e5c50f 100644 --- a/l2-contracts/contracts/interfaces/IPaymaster.sol +++ b/l2-contracts/contracts/interfaces/IPaymaster.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; import {Transaction} from "../L2ContractHelper.sol"; diff --git a/l2-contracts/contracts/interfaces/IPaymasterFlow.sol b/l2-contracts/contracts/interfaces/IPaymasterFlow.sol index 2ee4f64f7..207aee24e 100644 --- a/l2-contracts/contracts/interfaces/IPaymasterFlow.sol +++ b/l2-contracts/contracts/interfaces/IPaymasterFlow.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; /** * @author Matter Labs diff --git a/l2-contracts/contracts/vendor/AddressAliasHelper.sol b/l2-contracts/contracts/vendor/AddressAliasHelper.sol index da0282ed8..6adab1d92 100644 --- a/l2-contracts/contracts/vendor/AddressAliasHelper.sol +++ b/l2-contracts/contracts/vendor/AddressAliasHelper.sol @@ -15,11 +15,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; library AddressAliasHelper { - uint160 constant offset = uint160(0x1111000000000000000000000000000000001111); + uint160 internal constant offset = uint160(0x1111000000000000000000000000000000001111); /// @notice Utility function converts the address that submitted a tx /// to the inbox on L1 to the msg.sender viewed on L2 diff --git a/l2-contracts/contracts/verifier/TestnetVerifier.sol b/l2-contracts/contracts/verifier/TestnetVerifier.sol new file mode 100644 index 000000000..808fa70db --- /dev/null +++ b/l2-contracts/contracts/verifier/TestnetVerifier.sol @@ -0,0 +1,30 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {Verifier} from "./Verifier.sol"; +import {IVerifier} from "./chain-interfaces/IVerifier.sol"; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +/// @notice Modified version of the main verifier contract for the testnet environment +/// @dev This contract is used to skip the zkp verification for the testnet environment. +/// If the proof is not empty, it will verify it using the main verifier contract, +/// otherwise, it will skip the verification. +contract TestnetVerifier is Verifier { + constructor(uint256 _l1ChainId) { + assert(_l1ChainId != 1); + } + + /// @dev Verifies a zk-SNARK proof, skipping the verification if the proof is empty. 
+ /// @inheritdoc IVerifier + function verify(uint256[] calldata _publicInputs, uint256[] calldata _proof) public view override returns (bool) { + // We allow skipping the zkp verification for the test(net) environment + // If the proof is not empty, verify it, otherwise, skip the verification + if (_proof.length == 0) { + return true; + } + + return super.verify(_publicInputs, _proof); + } +} diff --git a/l2-contracts/contracts/verifier/Verifier.sol b/l2-contracts/contracts/verifier/Verifier.sol new file mode 100644 index 000000000..dd4eaff55 --- /dev/null +++ b/l2-contracts/contracts/verifier/Verifier.sol @@ -0,0 +1,1711 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {IVerifier} from "./chain-interfaces/IVerifier.sol"; + +/* solhint-disable max-line-length */ +/// @author Matter Labs +/// @notice Modified version of the Permutations over Lagrange-bases for Oecumenical Noninteractive arguments of +/// Knowledge (PLONK) verifier. +/// Modifications have been made to optimize the proof system for ZK chain circuits. +/// @dev Contract was generated from a verification key with a hash of 0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2 +/// @dev It uses a custom memory layout inside the inline assembly block. Each reserved memory cell is declared in the +/// constants below. +/// @dev For a better understanding of the verifier algorithm please refer to the following papers: +/// * Original Plonk Article: https://eprint.iacr.org/2019/953.pdf +/// * Original LookUp Article: https://eprint.iacr.org/2020/315.pdf +/// * Plonk for ZKsync v1.1: https://github.com/matter-labs/solidity_plonk_verifier/raw/recursive/bellman_vk_codegen_recursive/RecursivePlonkUnrolledForEthereum.pdf +/// The notation used in the code is the same as in the papers. 
+/* solhint-enable max-line-length */ +contract Verifier is IVerifier { + /*////////////////////////////////////////////////////////////// + Verification keys + //////////////////////////////////////////////////////////////*/ + + // Memory slots from 0x000 to 0x200 are reserved for intermediate computations and call to precompiles. + + uint256 internal constant VK_GATE_SETUP_0_X_SLOT = 0x200 + 0x000; + uint256 internal constant VK_GATE_SETUP_0_Y_SLOT = 0x200 + 0x020; + uint256 internal constant VK_GATE_SETUP_1_X_SLOT = 0x200 + 0x040; + uint256 internal constant VK_GATE_SETUP_1_Y_SLOT = 0x200 + 0x060; + uint256 internal constant VK_GATE_SETUP_2_X_SLOT = 0x200 + 0x080; + uint256 internal constant VK_GATE_SETUP_2_Y_SLOT = 0x200 + 0x0a0; + uint256 internal constant VK_GATE_SETUP_3_X_SLOT = 0x200 + 0x0c0; + uint256 internal constant VK_GATE_SETUP_3_Y_SLOT = 0x200 + 0x0e0; + uint256 internal constant VK_GATE_SETUP_4_X_SLOT = 0x200 + 0x100; + uint256 internal constant VK_GATE_SETUP_4_Y_SLOT = 0x200 + 0x120; + uint256 internal constant VK_GATE_SETUP_5_X_SLOT = 0x200 + 0x140; + uint256 internal constant VK_GATE_SETUP_5_Y_SLOT = 0x200 + 0x160; + uint256 internal constant VK_GATE_SETUP_6_X_SLOT = 0x200 + 0x180; + uint256 internal constant VK_GATE_SETUP_6_Y_SLOT = 0x200 + 0x1a0; + uint256 internal constant VK_GATE_SETUP_7_X_SLOT = 0x200 + 0x1c0; + uint256 internal constant VK_GATE_SETUP_7_Y_SLOT = 0x200 + 0x1e0; + + uint256 internal constant VK_GATE_SELECTORS_0_X_SLOT = 0x200 + 0x200; + uint256 internal constant VK_GATE_SELECTORS_0_Y_SLOT = 0x200 + 0x220; + uint256 internal constant VK_GATE_SELECTORS_1_X_SLOT = 0x200 + 0x240; + uint256 internal constant VK_GATE_SELECTORS_1_Y_SLOT = 0x200 + 0x260; + + uint256 internal constant VK_PERMUTATION_0_X_SLOT = 0x200 + 0x280; + uint256 internal constant VK_PERMUTATION_0_Y_SLOT = 0x200 + 0x2a0; + uint256 internal constant VK_PERMUTATION_1_X_SLOT = 0x200 + 0x2c0; + uint256 internal constant VK_PERMUTATION_1_Y_SLOT = 0x200 + 0x2e0; + 
uint256 internal constant VK_PERMUTATION_2_X_SLOT = 0x200 + 0x300; + uint256 internal constant VK_PERMUTATION_2_Y_SLOT = 0x200 + 0x320; + uint256 internal constant VK_PERMUTATION_3_X_SLOT = 0x200 + 0x340; + uint256 internal constant VK_PERMUTATION_3_Y_SLOT = 0x200 + 0x360; + + uint256 internal constant VK_LOOKUP_SELECTOR_X_SLOT = 0x200 + 0x380; + uint256 internal constant VK_LOOKUP_SELECTOR_Y_SLOT = 0x200 + 0x3a0; + + uint256 internal constant VK_LOOKUP_TABLE_0_X_SLOT = 0x200 + 0x3c0; + uint256 internal constant VK_LOOKUP_TABLE_0_Y_SLOT = 0x200 + 0x3e0; + uint256 internal constant VK_LOOKUP_TABLE_1_X_SLOT = 0x200 + 0x400; + uint256 internal constant VK_LOOKUP_TABLE_1_Y_SLOT = 0x200 + 0x420; + uint256 internal constant VK_LOOKUP_TABLE_2_X_SLOT = 0x200 + 0x440; + uint256 internal constant VK_LOOKUP_TABLE_2_Y_SLOT = 0x200 + 0x460; + uint256 internal constant VK_LOOKUP_TABLE_3_X_SLOT = 0x200 + 0x480; + uint256 internal constant VK_LOOKUP_TABLE_3_Y_SLOT = 0x200 + 0x4a0; + + uint256 internal constant VK_LOOKUP_TABLE_TYPE_X_SLOT = 0x200 + 0x4c0; + uint256 internal constant VK_LOOKUP_TABLE_TYPE_Y_SLOT = 0x200 + 0x4e0; + + uint256 internal constant VK_RECURSIVE_FLAG_SLOT = 0x200 + 0x500; + + /*////////////////////////////////////////////////////////////// + Proof + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant PROOF_PUBLIC_INPUT = 0x200 + 0x520 + 0x000; + + uint256 internal constant PROOF_STATE_POLYS_0_X_SLOT = 0x200 + 0x520 + 0x020; + uint256 internal constant PROOF_STATE_POLYS_0_Y_SLOT = 0x200 + 0x520 + 0x040; + uint256 internal constant PROOF_STATE_POLYS_1_X_SLOT = 0x200 + 0x520 + 0x060; + uint256 internal constant PROOF_STATE_POLYS_1_Y_SLOT = 0x200 + 0x520 + 0x080; + uint256 internal constant PROOF_STATE_POLYS_2_X_SLOT = 0x200 + 0x520 + 0x0a0; + uint256 internal constant PROOF_STATE_POLYS_2_Y_SLOT = 0x200 + 0x520 + 0x0c0; + uint256 internal constant PROOF_STATE_POLYS_3_X_SLOT = 0x200 + 0x520 + 0x0e0; + uint256 internal 
constant PROOF_STATE_POLYS_3_Y_SLOT = 0x200 + 0x520 + 0x100; + + uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_X_SLOT = 0x200 + 0x520 + 0x120; + uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_Y_SLOT = 0x200 + 0x520 + 0x140; + + uint256 internal constant PROOF_LOOKUP_S_POLY_X_SLOT = 0x200 + 0x520 + 0x160; + uint256 internal constant PROOF_LOOKUP_S_POLY_Y_SLOT = 0x200 + 0x520 + 0x180; + + uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_X_SLOT = 0x200 + 0x520 + 0x1a0; + uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_Y_SLOT = 0x200 + 0x520 + 0x1c0; + + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_0_X_SLOT = 0x200 + 0x520 + 0x1e0; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_0_Y_SLOT = 0x200 + 0x520 + 0x200; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_1_X_SLOT = 0x200 + 0x520 + 0x220; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_1_Y_SLOT = 0x200 + 0x520 + 0x240; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_2_X_SLOT = 0x200 + 0x520 + 0x260; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_2_Y_SLOT = 0x200 + 0x520 + 0x280; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_3_X_SLOT = 0x200 + 0x520 + 0x2a0; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_3_Y_SLOT = 0x200 + 0x520 + 0x2c0; + + uint256 internal constant PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x2e0; + uint256 internal constant PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x300; + uint256 internal constant PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x320; + uint256 internal constant PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x340; + + uint256 internal constant PROOF_STATE_POLYS_3_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x360; + uint256 internal constant PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x380; + + uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x3a0; + uint256 internal constant 
PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x3c0; + uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x3e0; + + uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x400; + uint256 internal constant PROOF_LOOKUP_S_POLY_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x420; + uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x440; + uint256 internal constant PROOF_LOOKUP_T_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x460; + uint256 internal constant PROOF_LOOKUP_T_POLY_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x480; + uint256 internal constant PROOF_LOOKUP_SELECTOR_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x4a0; + uint256 internal constant PROOF_LOOKUP_TABLE_TYPE_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x4c0; + uint256 internal constant PROOF_QUOTIENT_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x4e0; + uint256 internal constant PROOF_LINEARISATION_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x500; + + uint256 internal constant PROOF_OPENING_PROOF_AT_Z_X_SLOT = 0x200 + 0x520 + 0x520; + uint256 internal constant PROOF_OPENING_PROOF_AT_Z_Y_SLOT = 0x200 + 0x520 + 0x540; + uint256 internal constant PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT = 0x200 + 0x520 + 0x560; + uint256 internal constant PROOF_OPENING_PROOF_AT_Z_OMEGA_Y_SLOT = 0x200 + 0x520 + 0x580; + + uint256 internal constant PROOF_RECURSIVE_PART_P1_X_SLOT = 0x200 + 0x520 + 0x5a0; + uint256 internal constant PROOF_RECURSIVE_PART_P1_Y_SLOT = 0x200 + 0x520 + 0x5c0; + + uint256 internal constant PROOF_RECURSIVE_PART_P2_X_SLOT = 0x200 + 0x520 + 0x5e0; + uint256 internal constant PROOF_RECURSIVE_PART_P2_Y_SLOT = 0x200 + 0x520 + 0x600; + + /*////////////////////////////////////////////////////////////// + Transcript slot + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant TRANSCRIPT_BEGIN_SLOT = 0x200 + 0x520 + 0x620 + 0x00; + 
uint256 internal constant TRANSCRIPT_DST_BYTE_SLOT = 0x200 + 0x520 + 0x620 + 0x03; + uint256 internal constant TRANSCRIPT_STATE_0_SLOT = 0x200 + 0x520 + 0x620 + 0x04; + uint256 internal constant TRANSCRIPT_STATE_1_SLOT = 0x200 + 0x520 + 0x620 + 0x24; + uint256 internal constant TRANSCRIPT_CHALLENGE_SLOT = 0x200 + 0x520 + 0x620 + 0x44; + + /*////////////////////////////////////////////////////////////// + Partial verifier state + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant STATE_ALPHA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x000; + uint256 internal constant STATE_BETA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x020; + uint256 internal constant STATE_GAMMA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x040; + uint256 internal constant STATE_POWER_OF_ALPHA_2_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x060; + uint256 internal constant STATE_POWER_OF_ALPHA_3_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x080; + uint256 internal constant STATE_POWER_OF_ALPHA_4_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x0a0; + uint256 internal constant STATE_POWER_OF_ALPHA_5_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x0c0; + uint256 internal constant STATE_POWER_OF_ALPHA_6_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x0e0; + uint256 internal constant STATE_POWER_OF_ALPHA_7_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x100; + uint256 internal constant STATE_POWER_OF_ALPHA_8_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x120; + uint256 internal constant STATE_ETA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x140; + uint256 internal constant STATE_BETA_LOOKUP_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x160; + uint256 internal constant STATE_GAMMA_LOOKUP_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x180; + uint256 internal constant STATE_BETA_PLUS_ONE_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x1a0; + uint256 internal constant STATE_BETA_GAMMA_PLUS_GAMMA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x1c0; + uint256 internal constant STATE_V_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x1e0; + uint256 internal constant 
STATE_U_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x200; + uint256 internal constant STATE_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x220; + uint256 internal constant STATE_Z_MINUS_LAST_OMEGA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x240; + uint256 internal constant STATE_L_0_AT_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x260; + uint256 internal constant STATE_L_N_MINUS_ONE_AT_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x280; + uint256 internal constant STATE_Z_IN_DOMAIN_SIZE = 0x200 + 0x520 + 0x620 + 0x80 + 0x2a0; + + /*////////////////////////////////////////////////////////////// + Queries + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant QUERIES_BUFFER_POINT_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x00; + + uint256 internal constant QUERIES_AT_Z_0_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x40; + uint256 internal constant QUERIES_AT_Z_0_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x60; + uint256 internal constant QUERIES_AT_Z_1_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x80; + uint256 internal constant QUERIES_AT_Z_1_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0xa0; + + uint256 internal constant QUERIES_T_POLY_AGGREGATED_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0xc0; + uint256 internal constant QUERIES_T_POLY_AGGREGATED_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0xe0; + + /*////////////////////////////////////////////////////////////// + Aggregated commitment + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant AGGREGATED_AT_Z_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x00; + uint256 internal constant AGGREGATED_AT_Z_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x20; + + uint256 internal constant AGGREGATED_AT_Z_OMEGA_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x40; + uint256 internal constant AGGREGATED_AT_Z_OMEGA_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x60; + + uint256 internal constant 
AGGREGATED_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x80; + uint256 internal constant AGGREGATED_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xa0; + + /*////////////////////////////////////////////////////////////// + Pairing data + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant PAIRING_BUFFER_POINT_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x00; + uint256 internal constant PAIRING_BUFFER_POINT_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x20; + + uint256 internal constant PAIRING_PAIR_WITH_GENERATOR_X_SLOT = + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x40; + uint256 internal constant PAIRING_PAIR_WITH_GENERATOR_Y_SLOT = + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x60; + + uint256 internal constant PAIRING_PAIR_WITH_X_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0x80; + uint256 internal constant PAIRING_PAIR_WITH_X_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xa0; + + /*////////////////////////////////////////////////////////////// + Slots for scalar multiplication optimizations + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant COPY_PERMUTATION_FIRST_AGGREGATED_COMMITMENT_COEFF = + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xc0; + uint256 internal constant LOOKUP_GRAND_PRODUCT_FIRST_AGGREGATED_COMMITMENT_COEFF = + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xe0; + uint256 internal constant LOOKUP_S_FIRST_AGGREGATED_COMMITMENT_COEFF = + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0x100; + + /*////////////////////////////////////////////////////////////// + Constants + //////////////////////////////////////////////////////////////*/ + + uint256 internal constant OMEGA = 0x1951441010b2b95a6e47a6075066a50a036f5ba978c050f2821df86636c0facb; + uint256 internal constant DOMAIN_SIZE = 
0x1000000; // 2^24 + uint256 internal constant Q_MOD = 21888242871839275222246405745257275088696311157297823662689037894645226208583; + uint256 internal constant R_MOD = 21888242871839275222246405745257275088548364400416034343698204186575808495617; + + /// @dev flip of 0xe000000000000000000000000000000000000000000000000000000000000000; + uint256 internal constant FR_MASK = 0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff; + + // non residues + uint256 internal constant NON_RESIDUES_0 = 0x05; + uint256 internal constant NON_RESIDUES_1 = 0x07; + uint256 internal constant NON_RESIDUES_2 = 0x0a; + + // trusted setup g2 elements + uint256 internal constant G2_ELEMENTS_0_X1 = 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2; + uint256 internal constant G2_ELEMENTS_0_X2 = 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed; + uint256 internal constant G2_ELEMENTS_0_Y1 = 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b; + uint256 internal constant G2_ELEMENTS_0_Y2 = 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa; + uint256 internal constant G2_ELEMENTS_1_X1 = 0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1; + uint256 internal constant G2_ELEMENTS_1_X2 = 0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0; + uint256 internal constant G2_ELEMENTS_1_Y1 = 0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4; + uint256 internal constant G2_ELEMENTS_1_Y2 = 0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55; + + /// @inheritdoc IVerifier + function verificationKeyHash() external pure returns (bytes32 vkHash) { + _loadVerificationKey(); + + assembly { + let start := VK_GATE_SETUP_0_X_SLOT + let end := VK_RECURSIVE_FLAG_SLOT + let length := add(sub(end, start), 0x20) + + vkHash := keccak256(start, length) + } + } + + /// @notice Load verification keys to memory in runtime. 
/// @dev The constants are loaded into memory in a specific layout declared in the constants starting from
    /// `VK_` prefix.
    /// NOTE: Function may corrupt the memory state if some memory was used before this function was called.
    /// The VK consists of commitments to setup polynomials:
    /// [q_a], [q_b], [q_c], [q_d], - main gate setup commitments
    /// [q_{d_next}], [q_ab], [q_ac], [q_const] /
    /// [main_gate_selector], [custom_gate_selector] - gate selectors commitments
    /// [sigma_0], [sigma_1], [sigma_2], [sigma_3] - permutation polynomials commitments
    /// [lookup_selector] - lookup selector commitment
    /// [col_0], [col_1], [col_2], [col_3] - lookup columns commitments
    /// [table_type] - lookup table type commitment
    function _loadVerificationKey() internal pure virtual {
        assembly {
            // gate setup commitments
            mstore(VK_GATE_SETUP_0_X_SLOT, 0x110deb1e0863737f9a3d7b4de641a03aa00a77bc9f1a05acc9d55b76ab9fdd4d)
            mstore(VK_GATE_SETUP_0_Y_SLOT, 0x2c9dc252441e9298b7f6df6335a252517b7bccb924adf537b87c5cd3383fd7a9)
            mstore(VK_GATE_SETUP_1_X_SLOT, 0x04659caf7b05471ba5ba85b1ab62267aa6c456836e625f169f7119d55b9462d2)
            mstore(VK_GATE_SETUP_1_Y_SLOT, 0x0ea63403692148d2ad22189a1e5420076312f4d46e62036a043a6b0b84d5b410)
            mstore(VK_GATE_SETUP_2_X_SLOT, 0x0e6696d09d65fce1e42805be03fca1f14aea247281f688981f925e77d4ce2291)
            mstore(VK_GATE_SETUP_2_Y_SLOT, 0x0228f6cf8fe20c1e07e5b78bf8c41d50e55975a126d22a198d1e56acd4bbb3dd)
            mstore(VK_GATE_SETUP_3_X_SLOT, 0x14685dafe340b1dec5eafcd5e7faddaf24f3781ddc53309cc25d0b42c00541dd)
            mstore(VK_GATE_SETUP_3_Y_SLOT, 0x0e651cff9447cb360198899b80fa23e89ec13bc94ff161729aa841d2b55ea5be)
            mstore(VK_GATE_SETUP_4_X_SLOT, 0x16e9ef76cb68f2750eb0ee72382dd9911a982308d0ab10ef94dada13c382ae73)
            mstore(VK_GATE_SETUP_4_Y_SLOT, 0x22e404bc91350f3bc7daad1d1025113742436983c85eac5ab7b42221a181b81e)
            mstore(VK_GATE_SETUP_5_X_SLOT, 0x0d9b29613037a5025655c82b143d2b7449c98f3aea358307c8529249cc54f3b9)
            mstore(VK_GATE_SETUP_5_Y_SLOT, 0x15b3c4c946ad1babfc4c03ff7c2423fd354af3a9305c499b7fb3aaebe2fee746)
            mstore(VK_GATE_SETUP_6_X_SLOT, 0x2a4cb6c495dbc7201142cc773da895ae2046e790073988fb850aca6aead27b8a)
            mstore(VK_GATE_SETUP_6_Y_SLOT, 0x28ef9200c3cb67da82030520d640292014f5f7c2e2909da608812e04671a3acf)
            mstore(VK_GATE_SETUP_7_X_SLOT, 0x283344a1ab3e55ecfd904d0b8e9f4faea338df5a4ead2fa9a42f0e103da40abc)
            mstore(VK_GATE_SETUP_7_Y_SLOT, 0x223b37b83b9687512d322993edd70e508dd80adb10bcf7321a3cc8a44c269521)

            // gate selectors commitments
            mstore(VK_GATE_SELECTORS_0_X_SLOT, 0x1f67f0ba5f7e837bc680acb4e612ebd938ad35211aa6e05b96cad19e66b82d2d)
            mstore(VK_GATE_SELECTORS_0_Y_SLOT, 0x2820641a84d2e8298ac2ac42bd4b912c0c37f768ecc83d3a29e7c720763d15a1)
            mstore(VK_GATE_SELECTORS_1_X_SLOT, 0x0353257957562270292a17860ca8e8827703f828f440ee004848b1e23fdf9de2)
            mstore(VK_GATE_SELECTORS_1_Y_SLOT, 0x305f4137fee253dff8b2bfe579038e8f25d5bd217865072af5d89fc8800ada24)

            // permutation commitments
            mstore(VK_PERMUTATION_0_X_SLOT, 0x13a600154b369ff3237706d00948e465ee1c32c7a6d3e18bccd9c4a15910f2e5)
            mstore(VK_PERMUTATION_0_Y_SLOT, 0x138aa24fbf4cdddc75114811b3d59040394c218ecef3eb46ef9bd646f7e53776)
            mstore(VK_PERMUTATION_1_X_SLOT, 0x277fff1f80c409357e2d251d79f6e3fd2164b755ce69cfd72de5c690289df662)
            mstore(VK_PERMUTATION_1_Y_SLOT, 0x25235588e28c70eea3e35531c80deac25cd9b53ea3f98993f120108bc7abf670)
            mstore(VK_PERMUTATION_2_X_SLOT, 0x0990e07a9b001048b947d0e5bd6157214c7359b771f01bf52bd771ba563a900e)
            mstore(VK_PERMUTATION_2_Y_SLOT, 0x05e5fb090dd40914c8606d875e301167ae3047d684a02b44d9d36f1eaf43d0b4)
            mstore(VK_PERMUTATION_3_X_SLOT, 0x1d4656690b33299db5631401a282afab3e16c78ee2c9ad9efea628171dcbc6bc)
            mstore(VK_PERMUTATION_3_Y_SLOT, 0x0ebda2ebe582f601f813ec1e3970d13ef1500c742a85cce9b7f190f333de03b0)

            // lookup tables commitments
            mstore(VK_LOOKUP_TABLE_0_X_SLOT, 0x2c513ed74d9d57a5ec901e074032741036353a2c4513422e96e7b53b302d765b)
            mstore(VK_LOOKUP_TABLE_0_Y_SLOT, 0x04dd964427e430f16004076d708c0cb21e225056cc1d57418cfbd3d472981468)
            mstore(VK_LOOKUP_TABLE_1_X_SLOT, 0x1ea83e5e65c6f8068f4677e2911678cf329b28259642a32db1f14b8347828aac)
            mstore(VK_LOOKUP_TABLE_1_Y_SLOT, 0x1d22bc884a2da4962a893ba8de13f57aaeb785ed52c5e686994839cab8f7475d)
            mstore(VK_LOOKUP_TABLE_2_X_SLOT, 0x0b2e7212d0d9cff26d0bdf3d79b2cac029a25dfeb1cafdf49e2349d7db348d89)
            mstore(VK_LOOKUP_TABLE_2_Y_SLOT, 0x1301f9b252419ea240eb67fda720ca0b16d92364027285f95e9b1349490fa283)
            mstore(VK_LOOKUP_TABLE_3_X_SLOT, 0x02f7b99fdfa5b418548c2d777785820e02383cfc87e7085e280a375a358153bf)
            mstore(VK_LOOKUP_TABLE_3_Y_SLOT, 0x09d004fe08dc4d19c382df36fad22ef676185663543703e6a4b40203e50fd8a6)

            // lookup selector commitment
            mstore(VK_LOOKUP_SELECTOR_X_SLOT, 0x2f4d347c7fb61daaadfff881e24f4b5dcfdc0d70a95bcb148168b90ef93e0007)
            mstore(VK_LOOKUP_SELECTOR_Y_SLOT, 0x2322632465ba8e28cd0a4befd813ea85a972f4f6fa8e8603cf5d062dbcb14065)

            // table type commitment
            mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x1e3c9fc98c118e4bc34f1f93d214a5d86898e980c40d8e2c180c6ada377a7467)
            mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x2260a13535c35a15c173f5e5797d4b675b55d164a9995bfb7624971324bd84a8)

            // flag for using recursive part (0 here: proofs carry no recursive part)
            mstore(VK_RECURSIVE_FLAG_SLOT, 0)
        }
    }

    /// @inheritdoc IVerifier
    function verify(
        uint256[] calldata, // _publicInputs
        uint256[] calldata // _proof
    ) public view virtual returns (bool) {
        // No memory was accessed yet, so keys can be loaded into the right place and not corrupt any other memory.
        _loadVerificationKey();

        // Beginning of the big inline assembly block that makes all the verification work.
        // Note: We use the custom memory layout, so the return value should be returned from the assembly, not
        // Solidity code.
        assembly {
            /*//////////////////////////////////////////////////////////////
                                    Utils
            //////////////////////////////////////////////////////////////*/

            /// @dev Reverts execution with a provided revert reason.
/// @param len The byte length of the error message string, which is expected to be no more than 32.
            /// @param reason The 1-word revert reason string, encoded in ASCII.
            function revertWithMessage(len, reason) {
                // "Error(string)" signature: bytes32(bytes4(keccak256("Error(string)")))
                mstore(0x00, 0x08c379a000000000000000000000000000000000000000000000000000000000)
                // Data offset
                mstore(0x04, 0x0000000000000000000000000000000000000000000000000000000000000020)
                // Length of revert string
                mstore(0x24, len)
                // Revert reason
                mstore(0x44, reason)
                // Revert
                revert(0x00, 0x64)
            }

            /// @dev Performs modular exponentiation using the formula (value ^ power) mod R_MOD.
            /// Square-and-multiply: O(log(power)) field multiplications.
            function modexp(value, power) -> res {
                res := 1
                for {

                } gt(power, 0) {

                } {
                    // Multiply into the result only when the current low bit of `power` is set.
                    if mod(power, 2) {
                        res := mulmod(res, value, R_MOD)
                    }
                    value := mulmod(value, value, R_MOD)
                    power := shr(1, power)
                }
            }

            /// @dev Performs a point multiplication operation and stores the result in a given memory destination.
            /// Calls the ecMul precompile (address 7) on scratch memory 0x00..0x60; reverts on failure.
            function pointMulIntoDest(point, s, dest) {
                mstore(0x00, mload(point))
                mstore(0x20, mload(add(point, 0x20)))
                mstore(0x40, s)
                if iszero(staticcall(gas(), 7, 0, 0x60, dest, 0x40)) {
                    revertWithMessage(30, "pointMulIntoDest: ecMul failed")
                }
            }

            /// @dev Performs a point addition operation and stores the result in a given memory destination.
            /// Calls the ecAdd precompile (address 6) on scratch memory 0x00..0x80; reverts on failure.
            function pointAddIntoDest(p1, p2, dest) {
                mstore(0x00, mload(p1))
                mstore(0x20, mload(add(p1, 0x20)))
                mstore(0x40, mload(p2))
                mstore(0x60, mload(add(p2, 0x20)))
                if iszero(staticcall(gas(), 6, 0x00, 0x80, dest, 0x40)) {
                    revertWithMessage(30, "pointAddIntoDest: ecAdd failed")
                }
            }

            /// @dev Performs a point subtraction operation and updates the first point with the result.
            /// p1 - p2 is computed as p1 + (-p2), negating p2's y-coordinate mod Q_MOD.
            function pointSubAssign(p1, p2) {
                mstore(0x00, mload(p1))
                mstore(0x20, mload(add(p1, 0x20)))
                mstore(0x40, mload(p2))
                mstore(0x60, sub(Q_MOD, mload(add(p2, 0x20))))
                if iszero(staticcall(gas(), 6, 0x00, 0x80, p1, 0x40)) {
                    revertWithMessage(28, "pointSubAssign: ecAdd failed")
                }
            }

            /// @dev Performs a point addition operation and updates the first point with the result.
            function pointAddAssign(p1, p2) {
                mstore(0x00, mload(p1))
                mstore(0x20, mload(add(p1, 0x20)))
                mstore(0x40, mload(p2))
                mstore(0x60, mload(add(p2, 0x20)))
                if iszero(staticcall(gas(), 6, 0x00, 0x80, p1, 0x40)) {
                    revertWithMessage(28, "pointAddAssign: ecAdd failed")
                }
            }

            /// @dev Performs a point multiplication operation and then adds the result to the destination point.
            /// The ecMul output lands at scratch 0x00..0x40 and is fed straight into ecAdd together with `dest`.
            function pointMulAndAddIntoDest(point, s, dest) {
                mstore(0x00, mload(point))
                mstore(0x20, mload(add(point, 0x20)))
                mstore(0x40, s)
                let success := staticcall(gas(), 7, 0, 0x60, 0, 0x40)

                mstore(0x40, mload(dest))
                mstore(0x60, mload(add(dest, 0x20)))
                success := and(success, staticcall(gas(), 6, 0x00, 0x80, dest, 0x40))

                if iszero(success) {
                    revertWithMessage(22, "pointMulAndAddIntoDest")
                }
            }

            /// @dev Negates an elliptic curve point by changing the sign of the y-coordinate.
            /// The point at infinity (0, 0) is left untouched; (x, 0) with x != 0 is rejected as invalid.
            function pointNegate(point) {
                let pY := mload(add(point, 0x20))
                switch pY
                case 0 {
                    if mload(point) {
                        revertWithMessage(26, "pointNegate: invalid point")
                    }
                }
                default {
                    mstore(add(point, 0x20), sub(Q_MOD, pY))
                }
            }

            /*//////////////////////////////////////////////////////////////
                                Transcript helpers
            //////////////////////////////////////////////////////////////*/

            /// @dev Updates the transcript state with a new challenge value.
function updateTranscript(value) {
                // Absorb `value` with two keccak256 hashes over the same region,
                // domain-separated by the DST byte (0x00 / 0x01).
                mstore8(TRANSCRIPT_DST_BYTE_SLOT, 0x00)
                mstore(TRANSCRIPT_CHALLENGE_SLOT, value)
                let newState0 := keccak256(TRANSCRIPT_BEGIN_SLOT, 0x64)
                mstore8(TRANSCRIPT_DST_BYTE_SLOT, 0x01)
                let newState1 := keccak256(TRANSCRIPT_BEGIN_SLOT, 0x64)
                mstore(TRANSCRIPT_STATE_1_SLOT, newState1)
                mstore(TRANSCRIPT_STATE_0_SLOT, newState0)
            }

            /// @dev Retrieves a transcript challenge.
            /// The challenge index is placed in the top 4 bytes (shl 224) and the keccak output
            /// is masked by FR_MASK so the challenge fits into the scalar field.
            function getTranscriptChallenge(numberOfChallenge) -> challenge {
                mstore8(TRANSCRIPT_DST_BYTE_SLOT, 0x02)
                mstore(TRANSCRIPT_CHALLENGE_SLOT, shl(224, numberOfChallenge))
                challenge := and(keccak256(TRANSCRIPT_BEGIN_SLOT, 0x48), FR_MASK)
            }

            /*//////////////////////////////////////////////////////////////
                                    1. Load Proof
            //////////////////////////////////////////////////////////////*/

            /// @dev This function loads a zk-SNARK proof, ensures it's properly formatted, and stores it in memory.
            /// It ensures the number of inputs and the elliptic curve point's validity.
            /// Note: It does NOT reject inputs that exceed these module sizes, but rather wraps them within the
            /// module bounds.
            /// The proof consists of:
            /// 1. Public input: (1 field element from F_r)
            ///
            /// 2. Polynomial commitments (elliptic curve points over F_q):
            /// [a], [b], [c], [d] - state polynomials commitments
            /// [z_perm] - copy-permutation grand product commitment
            /// [s] - polynomial for lookup argument commitment
            /// [z_lookup] - lookup grand product commitment
            /// [t_0], [t_1], [t_2], [t_3] - quotient polynomial parts commitments
            /// [W], [W'] - proof openings commitments
            ///
            /// 3. Polynomial evaluations at z and z*omega (field elements from F_r):
            /// t(z) - quotient polynomial opening
            /// a(z), b(z), c(z), d(z), d(z*omega) - state polynomials openings
            /// main_gate_selector(z) - main gate selector opening
            /// sigma_0(z), sigma_1(z), sigma_2(z) - permutation polynomials openings
            /// z_perm(z*omega) - copy-permutation grand product opening
            /// z_lookup(z*omega) - lookup grand product opening
            /// lookup_selector(z) - lookup selector opening
            /// s(x*omega), t(z*omega), table_type(z) - lookup argument polynomial openings
            /// r(z) - linearisation polynomial opening
            ///
            /// 4. Recursive proof (0 or 2 elliptic curve points over F_q)
            function loadProof() {
                // 1. Load public input
                // `0x04` holds the calldata offset of the `_publicInputs` array head.
                let offset := calldataload(0x04)
                let publicInputLengthInWords := calldataload(add(offset, 0x04))
                let isValid := eq(publicInputLengthInWords, 1) // We expect only one public input
                mstore(PROOF_PUBLIC_INPUT, and(calldataload(add(offset, 0x24)), FR_MASK))

                // 2. Load the proof (except for the recursive part)
                offset := calldataload(0x24)
                let proofLengthInWords := calldataload(add(offset, 0x04))

                // Check the proof length depending on whether the recursive part is present
                let expectedProofLength
                switch mload(VK_RECURSIVE_FLAG_SLOT)
                case 0 {
                    expectedProofLength := 44
                }
                default {
                    expectedProofLength := 48
                }
                isValid := and(eq(proofLengthInWords, expectedProofLength), isValid)

                // Each point below is reduced into the base field and checked to lie on the
                // curve: y^2 == x^3 + 3 (mod Q_MOD). Failures only clear `isValid`;
                // the single revert happens at the end of this function.
                // PROOF_STATE_POLYS_0
                {
                    let x := mod(calldataload(add(offset, 0x024)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x044)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_STATE_POLYS_0_X_SLOT, x)
                    mstore(PROOF_STATE_POLYS_0_Y_SLOT, y)
                }
                // PROOF_STATE_POLYS_1
                {
                    let x := mod(calldataload(add(offset, 0x064)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x084)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_STATE_POLYS_1_X_SLOT, x)
                    mstore(PROOF_STATE_POLYS_1_Y_SLOT, y)
                }
                // PROOF_STATE_POLYS_2
                {
                    let x := mod(calldataload(add(offset, 0x0a4)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x0c4)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_STATE_POLYS_2_X_SLOT, x)
                    mstore(PROOF_STATE_POLYS_2_Y_SLOT, y)
                }
                // PROOF_STATE_POLYS_3
                {
                    let x := mod(calldataload(add(offset, 0x0e4)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x104)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_STATE_POLYS_3_X_SLOT, x)
                    mstore(PROOF_STATE_POLYS_3_Y_SLOT, y)
                }
                // PROOF_COPY_PERMUTATION_GRAND_PRODUCT
                {
                    let x := mod(calldataload(add(offset, 0x124)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x144)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_X_SLOT, x)
                    mstore(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_Y_SLOT, y)
                }
                // PROOF_LOOKUP_S_POLY
                {
                    let x := mod(calldataload(add(offset, 0x164)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x184)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_LOOKUP_S_POLY_X_SLOT, x)
                    mstore(PROOF_LOOKUP_S_POLY_Y_SLOT, y)
                }
                // PROOF_LOOKUP_GRAND_PRODUCT
                {
                    let x := mod(calldataload(add(offset, 0x1a4)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x1c4)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_LOOKUP_GRAND_PRODUCT_X_SLOT, x)
                    mstore(PROOF_LOOKUP_GRAND_PRODUCT_Y_SLOT, y)
                }
                // PROOF_QUOTIENT_POLY_PARTS_0
                {
                    let x := mod(calldataload(add(offset, 0x1e4)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x204)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_0_X_SLOT, x)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_0_Y_SLOT, y)
                }
                // PROOF_QUOTIENT_POLY_PARTS_1
                {
                    let x := mod(calldataload(add(offset, 0x224)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x244)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_1_X_SLOT, x)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_1_Y_SLOT, y)
                }
                // PROOF_QUOTIENT_POLY_PARTS_2
                {
                    let x := mod(calldataload(add(offset, 0x264)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x284)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_2_X_SLOT, x)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_2_Y_SLOT, y)
                }
                // PROOF_QUOTIENT_POLY_PARTS_3
                {
                    let x := mod(calldataload(add(offset, 0x2a4)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x2c4)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_3_X_SLOT, x)
                    mstore(PROOF_QUOTIENT_POLY_PARTS_3_Y_SLOT, y)
                }

                // Scalar openings are wrapped into the scalar field (mod R_MOD), not rejected.
                mstore(PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x2e4)), R_MOD))
                mstore(PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x304)), R_MOD))
                mstore(PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x324)), R_MOD))
                mstore(PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x344)), R_MOD))

                mstore(PROOF_STATE_POLYS_3_OPENING_AT_Z_OMEGA_SLOT, mod(calldataload(add(offset, 0x364)), R_MOD))
                mstore(PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x384)), R_MOD))

                mstore(PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x3a4)), R_MOD))
                mstore(PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x3c4)), R_MOD))
                mstore(PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x3e4)), R_MOD))

                mstore(
                    PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT,
                    mod(calldataload(add(offset, 0x404)), R_MOD)
                )
                mstore(PROOF_LOOKUP_S_POLY_OPENING_AT_Z_OMEGA_SLOT, mod(calldataload(add(offset, 0x424)), R_MOD))
                mstore(PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT, mod(calldataload(add(offset, 0x444)), R_MOD))
                mstore(PROOF_LOOKUP_T_POLY_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x464)), R_MOD))
                mstore(PROOF_LOOKUP_T_POLY_OPENING_AT_Z_OMEGA_SLOT, mod(calldataload(add(offset, 0x484)), R_MOD))
                mstore(PROOF_LOOKUP_SELECTOR_POLY_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x4a4)), R_MOD))
                mstore(PROOF_LOOKUP_TABLE_TYPE_POLY_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x4c4)), R_MOD))
                mstore(PROOF_QUOTIENT_POLY_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x4e4)), R_MOD))
                mstore(PROOF_LINEARISATION_POLY_OPENING_AT_Z_SLOT, mod(calldataload(add(offset, 0x504)), R_MOD))

                // PROOF_OPENING_PROOF_AT_Z
                {
                    let x := mod(calldataload(add(offset, 0x524)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x544)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_OPENING_PROOF_AT_Z_X_SLOT, x)
                    mstore(PROOF_OPENING_PROOF_AT_Z_Y_SLOT, y)
                }
                // PROOF_OPENING_PROOF_AT_Z_OMEGA
                {
                    let x := mod(calldataload(add(offset, 0x564)), Q_MOD)
                    let y := mod(calldataload(add(offset, 0x584)), Q_MOD)
                    let xx := mulmod(x, x, Q_MOD)
                    isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                    mstore(PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT, x)
                    mstore(PROOF_OPENING_PROOF_AT_Z_OMEGA_Y_SLOT, y)
                }

                // 3. Load the recursive part of the proof
                if mload(VK_RECURSIVE_FLAG_SLOT) {
                    // the recursive part must consist of 2 points

                    // PROOF_RECURSIVE_PART_P1
                    {
                        let x := mod(calldataload(add(offset, 0x5a4)), Q_MOD)
                        let y := mod(calldataload(add(offset, 0x5c4)), Q_MOD)
                        let xx := mulmod(x, x, Q_MOD)
                        isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                        mstore(PROOF_RECURSIVE_PART_P1_X_SLOT, x)
                        mstore(PROOF_RECURSIVE_PART_P1_Y_SLOT, y)
                    }
                    // PROOF_RECURSIVE_PART_P2
                    {
                        let x := mod(calldataload(add(offset, 0x5e4)), Q_MOD)
                        let y := mod(calldataload(add(offset, 0x604)), Q_MOD)
                        let xx := mulmod(x, x, Q_MOD)
                        isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid)
                        mstore(PROOF_RECURSIVE_PART_P2_X_SLOT, x)
                        mstore(PROOF_RECURSIVE_PART_P2_Y_SLOT, y)
                    }
                }

                // Revert if a proof is not valid
                if iszero(isValid) {
                    revertWithMessage(27, "loadProof: Proof is invalid")
                }
            }

            /*//////////////////////////////////////////////////////////////
                                    2.
Transcript initialization
            //////////////////////////////////////////////////////////////*/

            /// @notice Recomputes all challenges
            /// @dev The process is the following:
            /// Commit: PI, [a], [b], [c], [d]
            /// Get: eta
            /// Commit: [s]
            /// Get: beta, gamma
            /// Commit: [z_perm]
            /// Get: beta', gamma'
            /// Commit: [z_lookup]
            /// Get: alpha
            /// Commit: [t_0], [t_1], [t_2], [t_3]
            /// Get: z
            /// Commit: t(z), a(z), b(z), c(z), d(z), d(z*omega),
            /// main_gate_selector(z),
            /// sigma_0(z), sigma_1(z), sigma_2(z),
            /// z_perm(z*omega),
            /// t(z), lookup_selector(z), table_type(z),
            /// s(x*omega), z_lookup(z*omega), t(z*omega),
            /// r(z)
            /// Get: v
            /// Commit: [W], [W']
            /// Get: u
            function initializeTranscript() {
                // Round 1
                updateTranscript(mload(PROOF_PUBLIC_INPUT))
                updateTranscript(mload(PROOF_STATE_POLYS_0_X_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_0_Y_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_1_X_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_1_Y_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_2_X_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_2_Y_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_3_X_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_3_Y_SLOT))

                mstore(STATE_ETA_SLOT, getTranscriptChallenge(0))

                // Round 1.5
                updateTranscript(mload(PROOF_LOOKUP_S_POLY_X_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_S_POLY_Y_SLOT))

                mstore(STATE_BETA_SLOT, getTranscriptChallenge(1))
                mstore(STATE_GAMMA_SLOT, getTranscriptChallenge(2))

                // Round 2
                updateTranscript(mload(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_X_SLOT))
                updateTranscript(mload(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_Y_SLOT))

                mstore(STATE_BETA_LOOKUP_SLOT, getTranscriptChallenge(3))
                mstore(STATE_GAMMA_LOOKUP_SLOT, getTranscriptChallenge(4))

                // Round 2.5
                updateTranscript(mload(PROOF_LOOKUP_GRAND_PRODUCT_X_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_GRAND_PRODUCT_Y_SLOT))

                mstore(STATE_ALPHA_SLOT, getTranscriptChallenge(5))

                // Round 3
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_0_X_SLOT))
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_0_Y_SLOT))
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_1_X_SLOT))
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_1_Y_SLOT))
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_2_X_SLOT))
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_2_Y_SLOT))
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_3_X_SLOT))
                updateTranscript(mload(PROOF_QUOTIENT_POLY_PARTS_3_Y_SLOT))

                {
                    let z := getTranscriptChallenge(6)

                    // Cache z^n alongside z — both are reused throughout the quotient checks.
                    mstore(STATE_Z_SLOT, z)
                    mstore(STATE_Z_IN_DOMAIN_SIZE, modexp(z, DOMAIN_SIZE))
                }

                // Round 4
                updateTranscript(mload(PROOF_QUOTIENT_POLY_OPENING_AT_Z_SLOT))

                updateTranscript(mload(PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT))

                updateTranscript(mload(PROOF_STATE_POLYS_3_OPENING_AT_Z_OMEGA_SLOT))
                updateTranscript(mload(PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT))

                updateTranscript(mload(PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT))

                updateTranscript(mload(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_T_POLY_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_SELECTOR_POLY_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_TABLE_TYPE_POLY_OPENING_AT_Z_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_S_POLY_OPENING_AT_Z_OMEGA_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT))
                updateTranscript(mload(PROOF_LOOKUP_T_POLY_OPENING_AT_Z_OMEGA_SLOT))
                updateTranscript(mload(PROOF_LINEARISATION_POLY_OPENING_AT_Z_SLOT))

                mstore(STATE_V_SLOT, getTranscriptChallenge(7))

                // Round 5
                updateTranscript(mload(PROOF_OPENING_PROOF_AT_Z_X_SLOT))
                updateTranscript(mload(PROOF_OPENING_PROOF_AT_Z_Y_SLOT))
                updateTranscript(mload(PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT))
                updateTranscript(mload(PROOF_OPENING_PROOF_AT_Z_OMEGA_Y_SLOT))

                mstore(STATE_U_SLOT, getTranscriptChallenge(8))
            }

            /*//////////////////////////////////////////////////////////////
                            3. Verifying quotient evaluation
            //////////////////////////////////////////////////////////////*/

            /// @notice Compute linearisation polynomial's constant term: r_0
            /// @dev To save a verifier scalar multiplication, we split linearisation polynomial
            /// into its constant and non-constant terms. The constant term is computed with the formula:
            ///
            /// r_0 = alpha^0 * L_0(z) * PI * q_{main selector}(z) + r(z) -- main gate contribution
            ///
            /// - alpha^4 * z_perm(z*omega)(sigma_0(z) * beta + gamma + a(z))  \
            ///                            (sigma_1(z) * beta + gamma + b(z))  |
            ///                            (sigma_2(z) * beta + gamma + c(z))  | - permutation contribution
            ///                            (sigma_3(z) + gamma)                |
            /// - alpha^5 * L_0(z)                                             /
            ///
            /// + alpha^6 * (s(z*omega) * beta' + gamma' (beta' + 1))          \
            ///           * (z - omega^{n-1}) * z_lookup(z*omega)              | - lookup contribution
            /// - alpha^7 * L_0(z)                                             |
            /// - alpha^8 * L_{n-1}(z) * (gamma' (beta' + 1))^{n-1}            /
            ///
            /// In the end we should check that t(z)*Z_H(z) = r(z) + r_0!
function verifyQuotientEvaluation() {
                // Compute power of alpha (alpha^2 .. alpha^8 cached in STATE_POWER_OF_ALPHA_* slots)
                {
                    let alpha := mload(STATE_ALPHA_SLOT)
                    let currentAlpha := mulmod(alpha, alpha, R_MOD)
                    mstore(STATE_POWER_OF_ALPHA_2_SLOT, currentAlpha)
                    currentAlpha := mulmod(currentAlpha, alpha, R_MOD)
                    mstore(STATE_POWER_OF_ALPHA_3_SLOT, currentAlpha)
                    currentAlpha := mulmod(currentAlpha, alpha, R_MOD)
                    mstore(STATE_POWER_OF_ALPHA_4_SLOT, currentAlpha)
                    currentAlpha := mulmod(currentAlpha, alpha, R_MOD)
                    mstore(STATE_POWER_OF_ALPHA_5_SLOT, currentAlpha)
                    currentAlpha := mulmod(currentAlpha, alpha, R_MOD)
                    mstore(STATE_POWER_OF_ALPHA_6_SLOT, currentAlpha)
                    currentAlpha := mulmod(currentAlpha, alpha, R_MOD)
                    mstore(STATE_POWER_OF_ALPHA_7_SLOT, currentAlpha)
                    currentAlpha := mulmod(currentAlpha, alpha, R_MOD)
                    mstore(STATE_POWER_OF_ALPHA_8_SLOT, currentAlpha)
                }

                // z
                let stateZ := mload(STATE_Z_SLOT)
                // L_0(z)
                mstore(STATE_L_0_AT_Z_SLOT, evaluateLagrangePolyOutOfDomain(0, stateZ))
                // L_{n-1}(z)
                mstore(STATE_L_N_MINUS_ONE_AT_Z_SLOT, evaluateLagrangePolyOutOfDomain(sub(DOMAIN_SIZE, 1), stateZ))
                // L_0(z) * PI
                let stateT := mulmod(mload(STATE_L_0_AT_Z_SLOT), mload(PROOF_PUBLIC_INPUT), R_MOD)

                // Compute main gate contribution
                let result := mulmod(stateT, mload(PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT), R_MOD)

                // Compute permutation contribution
                result := addmod(result, permutationQuotientContribution(), R_MOD)

                // Compute lookup contribution
                result := addmod(result, lookupQuotientContribution(), R_MOD)

                // Check that r(z) + r_0 = t(z) * Z_H(z)
                result := addmod(mload(PROOF_LINEARISATION_POLY_OPENING_AT_Z_SLOT), result, R_MOD)

                // Z_H(z) = z^n - 1
                let vanishing := addmod(mload(STATE_Z_IN_DOMAIN_SIZE), sub(R_MOD, 1), R_MOD)
                let lhs := mulmod(mload(PROOF_QUOTIENT_POLY_OPENING_AT_Z_SLOT), vanishing, R_MOD)
                if iszero(eq(lhs, result)) {
                    revertWithMessage(27, "invalid quotient evaluation")
                }
            }

            /// @notice Evaluating L_{polyNum}(at) out of domain
            /// @dev L_i is a Lagrange polynomial for our domain such that:
            /// L_i(omega^i) = 1 and L_i(omega^j) = 0 for all j != i
            function evaluateLagrangePolyOutOfDomain(polyNum, at) -> res {
                let omegaPower := 1
                if polyNum {
                    omegaPower := modexp(OMEGA, polyNum)
                }

                // Numerator: at^n - 1 (the vanishing polynomial evaluated at `at`)
                res := addmod(modexp(at, DOMAIN_SIZE), sub(R_MOD, 1), R_MOD)

                // Vanishing polynomial can not be zero at point `at`
                if iszero(res) {
                    revertWithMessage(28, "invalid vanishing polynomial")
                }
                res := mulmod(res, omegaPower, R_MOD)
                let denominator := addmod(at, sub(R_MOD, omegaPower), R_MOD)
                denominator := mulmod(denominator, DOMAIN_SIZE, R_MOD)
                // Inverse via Fermat's little theorem: x^(R_MOD - 2) = x^(-1) mod R_MOD
                denominator := modexp(denominator, sub(R_MOD, 2))
                res := mulmod(res, denominator, R_MOD)
            }

            /// @notice Compute permutation contribution to linearisation polynomial's constant term
            function permutationQuotientContribution() -> res {
                // res = alpha^4 * z_perm(z*omega)
                res := mulmod(
                    mload(STATE_POWER_OF_ALPHA_4_SLOT),
                    mload(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT),
                    R_MOD
                )

                {
                    let gamma := mload(STATE_GAMMA_SLOT)
                    let beta := mload(STATE_BETA_SLOT)

                    let factorMultiplier
                    {
                        // res *= sigma_0(z) * beta + gamma + a(z)
                        factorMultiplier := mulmod(mload(PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT), beta, R_MOD)
                        factorMultiplier := addmod(factorMultiplier, gamma, R_MOD)
                        factorMultiplier := addmod(
                            factorMultiplier,
                            mload(PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT),
                            R_MOD
                        )
                        res := mulmod(res, factorMultiplier, R_MOD)
                    }
                    {
                        // res *= sigma_1(z) * beta + gamma + b(z)
                        factorMultiplier := mulmod(mload(PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT), beta, R_MOD)
                        factorMultiplier := addmod(factorMultiplier, gamma, R_MOD)
                        factorMultiplier := addmod(
                            factorMultiplier,
                            mload(PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT),
                            R_MOD
                        )
                        res := mulmod(res, factorMultiplier, R_MOD)
                    }
                    {
                        // res *= sigma_2(z) * beta + gamma + c(z)
                        factorMultiplier :=
mulmod(mload(PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT), beta, R_MOD)
                        factorMultiplier := addmod(factorMultiplier, gamma, R_MOD)
                        factorMultiplier := addmod(
                            factorMultiplier,
                            mload(PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT),
                            R_MOD
                        )
                        res := mulmod(res, factorMultiplier, R_MOD)
                    }

                    // res *= sigma_3(z) + gamma
                    res := mulmod(res, addmod(mload(PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT), gamma, R_MOD), R_MOD)
                }

                // res = -res
                res := sub(R_MOD, res)

                // -= L_0(z) * alpha^5
                let l0AtZ := mload(STATE_L_0_AT_Z_SLOT)
                l0AtZ := mulmod(l0AtZ, mload(STATE_POWER_OF_ALPHA_5_SLOT), R_MOD)
                res := addmod(res, sub(R_MOD, l0AtZ), R_MOD)
            }

            /// @notice Compute lookup contribution to linearisation polynomial's constant term
            /// @dev Also caches beta'+1, gamma'(beta'+1) and (z - omega^{n-1}) in STATE_* slots
            /// for reuse by the lookup linearisation helpers below.
            function lookupQuotientContribution() -> res {
                let betaLookup := mload(STATE_BETA_LOOKUP_SLOT)
                let gammaLookup := mload(STATE_GAMMA_LOOKUP_SLOT)
                let betaPlusOne := addmod(betaLookup, 1, R_MOD)
                let betaGamma := mulmod(betaPlusOne, gammaLookup, R_MOD)

                mstore(STATE_BETA_PLUS_ONE_SLOT, betaPlusOne)
                mstore(STATE_BETA_GAMMA_PLUS_GAMMA_SLOT, betaGamma)

                // res = alpha^6 * (s(z*omega) * beta' + gamma' (beta' + 1)) * z_lookup(z*omega)
                res := mulmod(mload(PROOF_LOOKUP_S_POLY_OPENING_AT_Z_OMEGA_SLOT), betaLookup, R_MOD)
                res := addmod(res, betaGamma, R_MOD)
                res := mulmod(res, mload(PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT), R_MOD)
                res := mulmod(res, mload(STATE_POWER_OF_ALPHA_6_SLOT), R_MOD)

                // res *= z - omega^{n-1}
                {
                    let lastOmega := modexp(OMEGA, sub(DOMAIN_SIZE, 1))
                    let zMinusLastOmega := addmod(mload(STATE_Z_SLOT), sub(R_MOD, lastOmega), R_MOD)
                    mstore(STATE_Z_MINUS_LAST_OMEGA_SLOT, zMinusLastOmega)
                    res := mulmod(res, zMinusLastOmega, R_MOD)
                }

                // res -= alpha^7 * L_{0}(z)
                {
                    let intermediateValue := mulmod(
                        mload(STATE_L_0_AT_Z_SLOT),
                        mload(STATE_POWER_OF_ALPHA_7_SLOT),
                        R_MOD
                    )
                    res := addmod(res, sub(R_MOD, intermediateValue), R_MOD)
                }

                // res -= alpha^8 * L_{n-1}(z) * (gamma' (beta' + 1))^{n-1}
                {
                    let lnMinusOneAtZ := mload(STATE_L_N_MINUS_ONE_AT_Z_SLOT)
                    let betaGammaPowered := modexp(betaGamma, sub(DOMAIN_SIZE, 1))
                    let alphaPower8 := mload(STATE_POWER_OF_ALPHA_8_SLOT)

                    let subtrahend := mulmod(mulmod(lnMinusOneAtZ, betaGammaPowered, R_MOD), alphaPower8, R_MOD)
                    res := addmod(res, sub(R_MOD, subtrahend), R_MOD)
                }
            }

            /// @notice Compute main gate contribution to linearisation polynomial commitment multiplied by v
            /// @dev Accumulates the gate-setup commitments scaled by the state openings into `dest`,
            /// then scales the whole sum once by v * main_gate_selector(z).
            function mainGateLinearisationContributionWithV(
                dest,
                stateOpening0AtZ,
                stateOpening1AtZ,
                stateOpening2AtZ,
                stateOpening3AtZ
            ) {
                // += a(z) * [q_a]
                pointMulIntoDest(VK_GATE_SETUP_0_X_SLOT, stateOpening0AtZ, dest)
                // += b(z) * [q_b]
                pointMulAndAddIntoDest(VK_GATE_SETUP_1_X_SLOT, stateOpening1AtZ, dest)
                // += c(z) * [q_c]
                pointMulAndAddIntoDest(VK_GATE_SETUP_2_X_SLOT, stateOpening2AtZ, dest)
                // += d(z) * [q_d]
                pointMulAndAddIntoDest(VK_GATE_SETUP_3_X_SLOT, stateOpening3AtZ, dest)
                // += a(z) * b(z) * [q_ab]
                pointMulAndAddIntoDest(VK_GATE_SETUP_4_X_SLOT, mulmod(stateOpening0AtZ, stateOpening1AtZ, R_MOD), dest)
                // += a(z) * c(z) * [q_ac]
                pointMulAndAddIntoDest(VK_GATE_SETUP_5_X_SLOT, mulmod(stateOpening0AtZ, stateOpening2AtZ, R_MOD), dest)
                // += [q_const]
                pointAddAssign(dest, VK_GATE_SETUP_6_X_SLOT)
                // += d(z*omega) * [q_{d_next}]
                pointMulAndAddIntoDest(VK_GATE_SETUP_7_X_SLOT, mload(PROOF_STATE_POLYS_3_OPENING_AT_Z_OMEGA_SLOT), dest)

                // *= v * main_gate_selector(z)
                let coeff := mulmod(mload(PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT), mload(STATE_V_SLOT), R_MOD)
                pointMulIntoDest(dest, coeff, dest)
            }

            /// @notice Compute custom gate contribution to linearisation polynomial commitment multiplied by v
            function addAssignRescueCustomGateLinearisationContributionWithV(
                dest,
                stateOpening0AtZ,
                stateOpening1AtZ,
                stateOpening2AtZ,
                stateOpening3AtZ
            ) {
                let accumulator
                let intermediateValue
                // = alpha * (a(z)^2 - b(z))
accumulator := mulmod(stateOpening0AtZ, stateOpening0AtZ, R_MOD)
                accumulator := addmod(accumulator, sub(R_MOD, stateOpening1AtZ), R_MOD)
                accumulator := mulmod(accumulator, mload(STATE_ALPHA_SLOT), R_MOD)
                // += alpha^2 * (b(z)^2 - c(z))
                intermediateValue := mulmod(stateOpening1AtZ, stateOpening1AtZ, R_MOD)
                intermediateValue := addmod(intermediateValue, sub(R_MOD, stateOpening2AtZ), R_MOD)
                intermediateValue := mulmod(intermediateValue, mload(STATE_POWER_OF_ALPHA_2_SLOT), R_MOD)
                accumulator := addmod(accumulator, intermediateValue, R_MOD)
                // += alpha^3 * (c(z) * a(z) - d(z))
                intermediateValue := mulmod(stateOpening2AtZ, stateOpening0AtZ, R_MOD)
                intermediateValue := addmod(intermediateValue, sub(R_MOD, stateOpening3AtZ), R_MOD)
                intermediateValue := mulmod(intermediateValue, mload(STATE_POWER_OF_ALPHA_3_SLOT), R_MOD)
                accumulator := addmod(accumulator, intermediateValue, R_MOD)

                // *= v * [custom_gate_selector]
                accumulator := mulmod(accumulator, mload(STATE_V_SLOT), R_MOD)
                pointMulAndAddIntoDest(VK_GATE_SELECTORS_1_X_SLOT, accumulator, dest)
            }

            /// @notice Compute copy-permutation contribution to linearisation polynomial commitment multiplied by v
            function addAssignPermutationLinearisationContributionWithV(
                dest,
                stateOpening0AtZ,
                stateOpening1AtZ,
                stateOpening2AtZ,
                stateOpening3AtZ
            ) {
                // alpha^4
                let factor := mload(STATE_POWER_OF_ALPHA_4_SLOT)
                // Calculate the factor
                {
                    // *= (a(z) + beta * z + gamma)
                    let zMulBeta := mulmod(mload(STATE_Z_SLOT), mload(STATE_BETA_SLOT), R_MOD)
                    let gamma := mload(STATE_GAMMA_SLOT)

                    let intermediateValue := addmod(addmod(zMulBeta, gamma, R_MOD), stateOpening0AtZ, R_MOD)
                    factor := mulmod(factor, intermediateValue, R_MOD)

                    // (b(z) + beta * z * k0 + gamma)
                    intermediateValue := addmod(
                        addmod(mulmod(zMulBeta, NON_RESIDUES_0, R_MOD), gamma, R_MOD),
                        stateOpening1AtZ,
                        R_MOD
                    )
                    factor := mulmod(factor, intermediateValue, R_MOD)

                    // (c(z) + beta * z * k1 + gamma)
                    intermediateValue := addmod(
                        addmod(mulmod(zMulBeta, NON_RESIDUES_1, R_MOD), gamma, R_MOD),
                        stateOpening2AtZ,
                        R_MOD
                    )
                    factor := mulmod(factor, intermediateValue, R_MOD)

                    // (d(z) + beta * z * k2 + gamma)
                    intermediateValue := addmod(
                        addmod(mulmod(zMulBeta, NON_RESIDUES_2, R_MOD), gamma, R_MOD),
                        stateOpening3AtZ,
                        R_MOD
                    )
                    factor := mulmod(factor, intermediateValue, R_MOD)
                }

                // += alpha^5 * L_0(z)
                let l0AtZ := mload(STATE_L_0_AT_Z_SLOT)
                factor := addmod(factor, mulmod(l0AtZ, mload(STATE_POWER_OF_ALPHA_5_SLOT), R_MOD), R_MOD)

                // Here we can optimize one scalar multiplication by aggregating coefficients near [z_perm] during
                // computing [F]
                // We will sum them and add and make one scalar multiplication: (coeff1 + coeff2) * [z_perm]
                factor := mulmod(factor, mload(STATE_V_SLOT), R_MOD)
                mstore(COPY_PERMUTATION_FIRST_AGGREGATED_COMMITMENT_COEFF, factor)

                // alpha^4 * beta * z_perm(z*omega)
                factor := mulmod(mload(STATE_POWER_OF_ALPHA_4_SLOT), mload(STATE_BETA_SLOT), R_MOD)
                factor := mulmod(factor, mload(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT), R_MOD)
                {
                    // *= (a(z) + beta * sigma_0(z) + gamma)
                    let beta := mload(STATE_BETA_SLOT)
                    let gamma := mload(STATE_GAMMA_SLOT)

                    let intermediateValue := addmod(
                        addmod(
                            mulmod(mload(PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT), beta, R_MOD),
                            gamma,
                            R_MOD
                        ),
                        stateOpening0AtZ,
                        R_MOD
                    )
                    factor := mulmod(factor, intermediateValue, R_MOD)

                    // *= (b(z) + beta * sigma_1(z) + gamma)
                    intermediateValue := addmod(
                        addmod(
                            mulmod(mload(PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT), beta, R_MOD),
                            gamma,
                            R_MOD
                        ),
                        stateOpening1AtZ,
                        R_MOD
                    )
                    factor := mulmod(factor, intermediateValue, R_MOD)

                    // *= (c(z) + beta * sigma_2(z) + gamma)
                    intermediateValue := addmod(
                        addmod(
                            mulmod(mload(PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT), beta, R_MOD),
                            gamma,
                            R_MOD
                        ),
                        stateOpening2AtZ,
                        R_MOD
                    )
                    factor :=
mulmod(factor, intermediateValue, R_MOD) + } + + // *= v * [sigma_3] + factor := mulmod(factor, mload(STATE_V_SLOT), R_MOD) + pointMulIntoDest(VK_PERMUTATION_3_X_SLOT, factor, QUERIES_BUFFER_POINT_SLOT) + + pointSubAssign(dest, QUERIES_BUFFER_POINT_SLOT) + } + + /// @notice Compute lookup contribution to linearisation polynomial commitment multiplied by v + function addAssignLookupLinearisationContributionWithV( + dest, + stateOpening0AtZ, + stateOpening1AtZ, + stateOpening2AtZ + ) { + // alpha^6 * v * z_lookup(z*omega) * (z - omega^{n-1}) * [s] + let factor := mload(PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT) + factor := mulmod(factor, mload(STATE_POWER_OF_ALPHA_6_SLOT), R_MOD) + factor := mulmod(factor, mload(STATE_Z_MINUS_LAST_OMEGA_SLOT), R_MOD) + factor := mulmod(factor, mload(STATE_V_SLOT), R_MOD) + + // Here we can optimize one scalar multiplication by aggregating coefficients near [s] during + // computing [F] + // We will sum them and add and make one scalar multiplication: (coeff1 + coeff2) * [s] + mstore(LOOKUP_S_FIRST_AGGREGATED_COMMITMENT_COEFF, factor) + + // gamma(1 + beta) + t(x) + beta * t(x*omega) + factor := mload(PROOF_LOOKUP_T_POLY_OPENING_AT_Z_OMEGA_SLOT) + factor := mulmod(factor, mload(STATE_BETA_LOOKUP_SLOT), R_MOD) + factor := addmod(factor, mload(PROOF_LOOKUP_T_POLY_OPENING_AT_Z_SLOT), R_MOD) + factor := addmod(factor, mload(STATE_BETA_GAMMA_PLUS_GAMMA_SLOT), R_MOD) + + // *= (gamma + f(z)) + // We should use fact that f(x) = + // lookup_selector(x) * (a(x) + eta * b(x) + eta^2 * c(x) + eta^3 * table_type(x)) + // to restore f(z) + let fReconstructed + { + fReconstructed := stateOpening0AtZ + let eta := mload(STATE_ETA_SLOT) + let currentEta := eta + + fReconstructed := addmod(fReconstructed, mulmod(currentEta, stateOpening1AtZ, R_MOD), R_MOD) + currentEta := mulmod(currentEta, eta, R_MOD) + fReconstructed := addmod(fReconstructed, mulmod(currentEta, stateOpening2AtZ, R_MOD), R_MOD) + currentEta := mulmod(currentEta, eta, R_MOD) + 
+ // add type of table + fReconstructed := addmod( + fReconstructed, + mulmod(mload(PROOF_LOOKUP_TABLE_TYPE_POLY_OPENING_AT_Z_SLOT), currentEta, R_MOD), + R_MOD + ) + fReconstructed := mulmod(fReconstructed, mload(PROOF_LOOKUP_SELECTOR_POLY_OPENING_AT_Z_SLOT), R_MOD) + fReconstructed := addmod(fReconstructed, mload(STATE_GAMMA_LOOKUP_SLOT), R_MOD) + } + // *= -alpha^6 * (beta + 1) * (z - omega^{n-1}) + factor := mulmod(factor, fReconstructed, R_MOD) + factor := mulmod(factor, mload(STATE_BETA_PLUS_ONE_SLOT), R_MOD) + factor := sub(R_MOD, factor) + factor := mulmod(factor, mload(STATE_POWER_OF_ALPHA_6_SLOT), R_MOD) + + factor := mulmod(factor, mload(STATE_Z_MINUS_LAST_OMEGA_SLOT), R_MOD) + + // += alpha^7 * L_0(z) + factor := addmod( + factor, + mulmod(mload(STATE_L_0_AT_Z_SLOT), mload(STATE_POWER_OF_ALPHA_7_SLOT), R_MOD), + R_MOD + ) + + // += alpha^8 * L_{n-1}(z) + factor := addmod( + factor, + mulmod(mload(STATE_L_N_MINUS_ONE_AT_Z_SLOT), mload(STATE_POWER_OF_ALPHA_8_SLOT), R_MOD), + R_MOD + ) + + // Here we can optimize one scalar multiplication by aggregating coefficients near [z_lookup] during + // computing [F] + // We will sum them and add and make one scalar multiplication: (coeff1 + coeff2) * [z_lookup] + factor := mulmod(factor, mload(STATE_V_SLOT), R_MOD) + mstore(LOOKUP_GRAND_PRODUCT_FIRST_AGGREGATED_COMMITMENT_COEFF, factor) + } + + /*////////////////////////////////////////////////////////////// + 4. 
Prepare queries + //////////////////////////////////////////////////////////////*/ + + /// @dev Here we compute the first and second parts of batched polynomial commitment + /// We use the formula: + /// [D0] = [t_0] + z^n * [t_1] + z^{2n} * [t_2] + z^{3n} * [t_3] + /// and + /// [D1] = main_gate_selector(z) * ( \ + /// a(z) * [q_a] + b(z) * [q_b] + c(z) * [q_c] + d(z) * [q_d] + | - main gate contribution + /// a(z) * b(z) * [q_ab] + a(z) * c(z) * [q_ac] + | + /// [q_const] + d(z*omega) * [q_{d_next}]) / + /// + /// + alpha * [custom_gate_selector] * ( \ + /// (a(z)^2 - b(z)) + | - custom gate contribution + /// (b(z)^2 - c(z)) * alpha + | + /// (a(z)*c(z) - d(z)) * alpha^2 ) / + /// + /// + alpha^4 * [z_perm] * \ + /// (a(z) + beta * z + gamma) * | + /// (b(z) + beta * z * k0 + gamma) * | + /// (c(z) + beta * z * k1 + gamma) * | + /// (d(z) + beta * z * k2 + gamma) | - permutation contribution + /// - alpha^4 * z_perm(z*omega) * beta * [sigma_3] * | + /// (a(z) + beta * sigma_0(z) + gamma) * | + /// (b(z) + beta * sigma_1(z) + gamma) * | + /// (c(z) + beta * sigma_2(z) + gamma) * | + /// + alpha^5 * L_0(z) * [z_perm] / + /// + /// - alpha^6 * (1 + beta') * (gamma' + f(z)) * (z - omega^{n-1}) * \ + /// (gamma'(1 + beta') + t(z) + beta' * t(z*omega)) * [z_lookup] | + /// + alpha^6 * z_lookup(z*omega) * (z - omega^{n-1}) * [s] | - lookup contribution + /// + alpha^7 * L_0(z) * [z_lookup] | + /// + alpha^8 * L_{n-1}(z) * [z_lookup] / + function prepareQueries() { + // Calculate [D0] + { + let zInDomainSize := mload(STATE_Z_IN_DOMAIN_SIZE) + let currentZ := zInDomainSize + + mstore(QUERIES_AT_Z_0_X_SLOT, mload(PROOF_QUOTIENT_POLY_PARTS_0_X_SLOT)) + mstore(QUERIES_AT_Z_0_Y_SLOT, mload(PROOF_QUOTIENT_POLY_PARTS_0_Y_SLOT)) + + pointMulAndAddIntoDest(PROOF_QUOTIENT_POLY_PARTS_1_X_SLOT, currentZ, QUERIES_AT_Z_0_X_SLOT) + currentZ := mulmod(currentZ, zInDomainSize, R_MOD) + + pointMulAndAddIntoDest(PROOF_QUOTIENT_POLY_PARTS_2_X_SLOT, currentZ, QUERIES_AT_Z_0_X_SLOT) + 
currentZ := mulmod(currentZ, zInDomainSize, R_MOD) + + pointMulAndAddIntoDest(PROOF_QUOTIENT_POLY_PARTS_3_X_SLOT, currentZ, QUERIES_AT_Z_0_X_SLOT) + } + + // Calculate v * [D1] + // We are going to multiply all the points in the sum by v to save + // one scalar multiplication during [F] computation + { + let stateOpening0AtZ := mload(PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT) + let stateOpening1AtZ := mload(PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT) + let stateOpening2AtZ := mload(PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT) + let stateOpening3AtZ := mload(PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT) + + mainGateLinearisationContributionWithV( + QUERIES_AT_Z_1_X_SLOT, + stateOpening0AtZ, + stateOpening1AtZ, + stateOpening2AtZ, + stateOpening3AtZ + ) + + addAssignRescueCustomGateLinearisationContributionWithV( + QUERIES_AT_Z_1_X_SLOT, + stateOpening0AtZ, + stateOpening1AtZ, + stateOpening2AtZ, + stateOpening3AtZ + ) + + addAssignPermutationLinearisationContributionWithV( + QUERIES_AT_Z_1_X_SLOT, + stateOpening0AtZ, + stateOpening1AtZ, + stateOpening2AtZ, + stateOpening3AtZ + ) + + addAssignLookupLinearisationContributionWithV( + QUERIES_AT_Z_1_X_SLOT, + stateOpening0AtZ, + stateOpening1AtZ, + stateOpening2AtZ + ) + } + + // Also we should restore [t] for future computations + // [t] = [col_0] + eta*[col_1] + eta^2*[col_2] + eta^3*[col_3] + { + mstore(QUERIES_T_POLY_AGGREGATED_X_SLOT, mload(VK_LOOKUP_TABLE_0_X_SLOT)) + mstore(QUERIES_T_POLY_AGGREGATED_Y_SLOT, mload(VK_LOOKUP_TABLE_0_Y_SLOT)) + + let eta := mload(STATE_ETA_SLOT) + let currentEta := eta + + pointMulAndAddIntoDest(VK_LOOKUP_TABLE_1_X_SLOT, currentEta, QUERIES_T_POLY_AGGREGATED_X_SLOT) + currentEta := mulmod(currentEta, eta, R_MOD) + + pointMulAndAddIntoDest(VK_LOOKUP_TABLE_2_X_SLOT, currentEta, QUERIES_T_POLY_AGGREGATED_X_SLOT) + currentEta := mulmod(currentEta, eta, R_MOD) + + pointMulAndAddIntoDest(VK_LOOKUP_TABLE_3_X_SLOT, currentEta, QUERIES_T_POLY_AGGREGATED_X_SLOT) + } + } + + 
/*////////////////////////////////////////////////////////////// + 5. Prepare aggregated commitment + //////////////////////////////////////////////////////////////*/ + + /// @dev Here we compute aggregated commitment for the final pairing + /// We use the formula: + /// [E] = ( t(z) + v * r(z) + /// + v^2*a(z) + v^3*b(z) + v^4*c(z) + v^5*d(z) + /// + v^6*main_gate_selector(z) + /// + v^7*sigma_0(z) + v^8*sigma_1(z) + v^9*sigma_2(z) + /// + v^10*t(z) + v^11*lookup_selector(z) + v^12*table_type(z) + /// + u * (v^13*z_perm(z*omega) + v^14*d(z*omega) + /// + v^15*s(z*omega) + v^16*z_lookup(z*omega) + v^17*t(z*omega) + /// ) + /// ) * [1] + /// and + /// [F] = [D0] + v * [D1] + /// + v^2*[a] + v^3*[b] + v^4*[c] + v^5*[d] + /// + v^6*[main_gate_selector] + /// + v^7*[sigma_0] + v^8*[sigma_1] + v^9*[sigma_2] + /// + v^10*[t] + v^11*[lookup_selector] + v^12*[table_type] + /// + u * ( v^13*[z_perm] + v^14*[d] + /// + v^15*[s] + v^16*[z_lookup] + v^17*[t] + /// ) + function prepareAggregatedCommitment() { + // Here we compute parts of [E] and [F] without u multiplier + let aggregationChallenge := 1 + let firstDCoeff + let firstTCoeff + + mstore(AGGREGATED_AT_Z_X_SLOT, mload(QUERIES_AT_Z_0_X_SLOT)) + mstore(AGGREGATED_AT_Z_Y_SLOT, mload(QUERIES_AT_Z_0_Y_SLOT)) + let aggregatedOpeningAtZ := mload(PROOF_QUOTIENT_POLY_OPENING_AT_Z_SLOT) + { + function updateAggregationChallenge( + queriesCommitmentPoint, + valueAtZ, + curAggregationChallenge, + curAggregatedOpeningAtZ + ) -> newAggregationChallenge, newAggregatedOpeningAtZ { + newAggregationChallenge := mulmod(curAggregationChallenge, mload(STATE_V_SLOT), R_MOD) + pointMulAndAddIntoDest(queriesCommitmentPoint, newAggregationChallenge, AGGREGATED_AT_Z_X_SLOT) + newAggregatedOpeningAtZ := addmod( + curAggregatedOpeningAtZ, + mulmod(newAggregationChallenge, mload(valueAtZ), R_MOD), + R_MOD + ) + } + + // We don't need to multiply by v, because we have already computed v * [D1] + pointAddIntoDest(AGGREGATED_AT_Z_X_SLOT, 
QUERIES_AT_Z_1_X_SLOT, AGGREGATED_AT_Z_X_SLOT) + aggregationChallenge := mulmod(aggregationChallenge, mload(STATE_V_SLOT), R_MOD) + aggregatedOpeningAtZ := addmod( + aggregatedOpeningAtZ, + mulmod(aggregationChallenge, mload(PROOF_LINEARISATION_POLY_OPENING_AT_Z_SLOT), R_MOD), + R_MOD + ) + + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + PROOF_STATE_POLYS_0_X_SLOT, + PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + PROOF_STATE_POLYS_1_X_SLOT, + PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + PROOF_STATE_POLYS_2_X_SLOT, + PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + + // Here we can optimize one scalar multiplication by aggregating coefficients near [d] + // We will sum them and add and make one scalar multiplication: (coeff1 + coeff2) * [d] + aggregationChallenge := mulmod(aggregationChallenge, mload(STATE_V_SLOT), R_MOD) + firstDCoeff := aggregationChallenge + aggregatedOpeningAtZ := addmod( + aggregatedOpeningAtZ, + mulmod(aggregationChallenge, mload(PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT), R_MOD), + R_MOD + ) + + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + VK_GATE_SELECTORS_0_X_SLOT, + PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + VK_PERMUTATION_0_X_SLOT, + PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + VK_PERMUTATION_1_X_SLOT, + PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + aggregationChallenge, 
aggregatedOpeningAtZ := updateAggregationChallenge( + VK_PERMUTATION_2_X_SLOT, + PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + + // Here we can optimize one scalar multiplication by aggregating coefficients near [t] + // We will sum them and add and make one scalar multiplication: (coeff1 + coeff2) * [t] + aggregationChallenge := mulmod(aggregationChallenge, mload(STATE_V_SLOT), R_MOD) + firstTCoeff := aggregationChallenge + aggregatedOpeningAtZ := addmod( + aggregatedOpeningAtZ, + mulmod(aggregationChallenge, mload(PROOF_LOOKUP_T_POLY_OPENING_AT_Z_SLOT), R_MOD), + R_MOD + ) + + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + VK_LOOKUP_SELECTOR_X_SLOT, + PROOF_LOOKUP_SELECTOR_POLY_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + aggregationChallenge, aggregatedOpeningAtZ := updateAggregationChallenge( + VK_LOOKUP_TABLE_TYPE_X_SLOT, + PROOF_LOOKUP_TABLE_TYPE_POLY_OPENING_AT_Z_SLOT, + aggregationChallenge, + aggregatedOpeningAtZ + ) + } + mstore(AGGREGATED_OPENING_AT_Z_SLOT, aggregatedOpeningAtZ) + + // Here we compute parts of [E] and [F] with u multiplier + aggregationChallenge := mulmod(aggregationChallenge, mload(STATE_V_SLOT), R_MOD) + + let copyPermutationCoeff := addmod( + mload(COPY_PERMUTATION_FIRST_AGGREGATED_COMMITMENT_COEFF), + mulmod(aggregationChallenge, mload(STATE_U_SLOT), R_MOD), + R_MOD + ) + + pointMulIntoDest( + PROOF_COPY_PERMUTATION_GRAND_PRODUCT_X_SLOT, + copyPermutationCoeff, + AGGREGATED_AT_Z_OMEGA_X_SLOT + ) + let aggregatedOpeningAtZOmega := mulmod( + mload(PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT), + aggregationChallenge, + R_MOD + ) + + { + function updateAggregationChallenge( + queriesCommitmentPoint, + valueAtZ_Omega, + previousCoeff, + curAggregationChallenge, + curAggregatedOpeningAtZ_Omega + ) -> newAggregationChallenge, newAggregatedOpeningAtZ_Omega { + newAggregationChallenge := 
mulmod(curAggregationChallenge, mload(STATE_V_SLOT), R_MOD) + let finalCoeff := addmod( + previousCoeff, + mulmod(newAggregationChallenge, mload(STATE_U_SLOT), R_MOD), + R_MOD + ) + pointMulAndAddIntoDest(queriesCommitmentPoint, finalCoeff, AGGREGATED_AT_Z_OMEGA_X_SLOT) + newAggregatedOpeningAtZ_Omega := addmod( + curAggregatedOpeningAtZ_Omega, + mulmod(newAggregationChallenge, mload(valueAtZ_Omega), R_MOD), + R_MOD + ) + } + + aggregationChallenge, aggregatedOpeningAtZOmega := updateAggregationChallenge( + PROOF_STATE_POLYS_3_X_SLOT, + PROOF_STATE_POLYS_3_OPENING_AT_Z_OMEGA_SLOT, + firstDCoeff, + aggregationChallenge, + aggregatedOpeningAtZOmega + ) + aggregationChallenge, aggregatedOpeningAtZOmega := updateAggregationChallenge( + PROOF_LOOKUP_S_POLY_X_SLOT, + PROOF_LOOKUP_S_POLY_OPENING_AT_Z_OMEGA_SLOT, + mload(LOOKUP_S_FIRST_AGGREGATED_COMMITMENT_COEFF), + aggregationChallenge, + aggregatedOpeningAtZOmega + ) + aggregationChallenge, aggregatedOpeningAtZOmega := updateAggregationChallenge( + PROOF_LOOKUP_GRAND_PRODUCT_X_SLOT, + PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT, + mload(LOOKUP_GRAND_PRODUCT_FIRST_AGGREGATED_COMMITMENT_COEFF), + aggregationChallenge, + aggregatedOpeningAtZOmega + ) + aggregationChallenge, aggregatedOpeningAtZOmega := updateAggregationChallenge( + QUERIES_T_POLY_AGGREGATED_X_SLOT, + PROOF_LOOKUP_T_POLY_OPENING_AT_Z_OMEGA_SLOT, + firstTCoeff, + aggregationChallenge, + aggregatedOpeningAtZOmega + ) + } + mstore(AGGREGATED_OPENING_AT_Z_OMEGA_SLOT, aggregatedOpeningAtZOmega) + + // Now we can merge both parts and get [E] and [F] + let u := mload(STATE_U_SLOT) + + // [F] + pointAddIntoDest( + AGGREGATED_AT_Z_X_SLOT, + AGGREGATED_AT_Z_OMEGA_X_SLOT, + PAIRING_PAIR_WITH_GENERATOR_X_SLOT + ) + + // [E] = (aggregatedOpeningAtZ + u * aggregatedOpeningAtZOmega) * [1] + let aggregatedValue := addmod( + mulmod(mload(AGGREGATED_OPENING_AT_Z_OMEGA_SLOT), u, R_MOD), + mload(AGGREGATED_OPENING_AT_Z_SLOT), + R_MOD + ) + + 
mstore(PAIRING_BUFFER_POINT_X_SLOT, 1) + mstore(PAIRING_BUFFER_POINT_Y_SLOT, 2) + pointMulIntoDest(PAIRING_BUFFER_POINT_X_SLOT, aggregatedValue, PAIRING_BUFFER_POINT_X_SLOT) + } + + /*////////////////////////////////////////////////////////////// + 5. Pairing + //////////////////////////////////////////////////////////////*/ + + /// @notice Checks the final pairing + /// @dev We should check the equation: + /// e([W] + u * [W'], [x]_2) = e(z * [W] + u * z * omega * [W'] + [F] - [E], [1]_2), + /// where [F] and [E] were computed previously + /// + /// Also we need to check that e([P1], [x]_2) = e([P2], [1]_2) + /// if we have the recursive part of the proof + /// where [P1] and [P2] are parts of the recursive proof + /// + /// We can aggregate both pairings into one for gas optimization: + /// e([W] + u * [W'] + u^2 * [P1], [x]_2) = + /// e(z * [W] + u * z * omega * [W'] + [F] - [E] + u^2 * [P2], [1]_2) + /// + /// u is a valid challenge for such aggregation, + /// because [P1] and [P2] are used in PI + function finalPairing() { + let u := mload(STATE_U_SLOT) + let z := mload(STATE_Z_SLOT) + let zOmega := mulmod(mload(STATE_Z_SLOT), OMEGA, R_MOD) + + // [F] - [E] + pointSubAssign(PAIRING_PAIR_WITH_GENERATOR_X_SLOT, PAIRING_BUFFER_POINT_X_SLOT) + + // +z * [W] + u * z * omega * [W'] + pointMulAndAddIntoDest(PROOF_OPENING_PROOF_AT_Z_X_SLOT, z, PAIRING_PAIR_WITH_GENERATOR_X_SLOT) + pointMulAndAddIntoDest( + PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT, + mulmod(zOmega, u, R_MOD), + PAIRING_PAIR_WITH_GENERATOR_X_SLOT + ) + + // [W] + u * [W'] + mstore(PAIRING_PAIR_WITH_X_X_SLOT, mload(PROOF_OPENING_PROOF_AT_Z_X_SLOT)) + mstore(PAIRING_PAIR_WITH_X_Y_SLOT, mload(PROOF_OPENING_PROOF_AT_Z_Y_SLOT)) + pointMulAndAddIntoDest(PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT, u, PAIRING_PAIR_WITH_X_X_SLOT) + pointNegate(PAIRING_PAIR_WITH_X_X_SLOT) + + // Add recursive proof part if needed + if mload(VK_RECURSIVE_FLAG_SLOT) { + let uu := mulmod(u, u, R_MOD) + 
pointMulAndAddIntoDest(PROOF_RECURSIVE_PART_P1_X_SLOT, uu, PAIRING_PAIR_WITH_GENERATOR_X_SLOT) + pointMulAndAddIntoDest(PROOF_RECURSIVE_PART_P2_X_SLOT, uu, PAIRING_PAIR_WITH_X_X_SLOT) + } + + // Calculate pairing + { + mstore(0x000, mload(PAIRING_PAIR_WITH_GENERATOR_X_SLOT)) + mstore(0x020, mload(PAIRING_PAIR_WITH_GENERATOR_Y_SLOT)) + + mstore(0x040, G2_ELEMENTS_0_X1) + mstore(0x060, G2_ELEMENTS_0_X2) + mstore(0x080, G2_ELEMENTS_0_Y1) + mstore(0x0a0, G2_ELEMENTS_0_Y2) + + mstore(0x0c0, mload(PAIRING_PAIR_WITH_X_X_SLOT)) + mstore(0x0e0, mload(PAIRING_PAIR_WITH_X_Y_SLOT)) + + mstore(0x100, G2_ELEMENTS_1_X1) + mstore(0x120, G2_ELEMENTS_1_X2) + mstore(0x140, G2_ELEMENTS_1_Y1) + mstore(0x160, G2_ELEMENTS_1_Y2) + + let success := staticcall(gas(), 8, 0, 0x180, 0x00, 0x20) + if iszero(success) { + revertWithMessage(32, "finalPairing: precompile failure") + } + if iszero(mload(0)) { + revertWithMessage(29, "finalPairing: pairing failure") + } + } + } + + /*////////////////////////////////////////////////////////////// + Verification + //////////////////////////////////////////////////////////////*/ + + // Step 1: Load the proof and check the correctness of its parts + loadProof() + + // Step 2: Recompute all the challenges with the transcript + initializeTranscript() + + // Step 3: Check the quotient equality + verifyQuotientEvaluation() + + // Step 4: Compute queries [D0] and v * [D1] + prepareQueries() + + // Step 5: Compute [E] and [F] + prepareAggregatedCommitment() + + // Step 6: Check the final pairing with aggregated recursive proof + finalPairing() + + mstore(0, true) + return(0, 32) + } + } +} diff --git a/l2-contracts/contracts/verifier/chain-interfaces/IVerifier.sol b/l2-contracts/contracts/verifier/chain-interfaces/IVerifier.sol new file mode 100644 index 000000000..fe5e2af2c --- /dev/null +++ b/l2-contracts/contracts/verifier/chain-interfaces/IVerifier.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be 
used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.21; + +/// @notice Part of the configuration parameters of ZKP circuits +struct VerifierParams { + bytes32 recursionNodeLevelVkHash; + bytes32 recursionLeafLevelVkHash; + bytes32 recursionCircuitsSetVksHash; +} + +/// @title The interface of the Verifier contract, responsible for the zero knowledge proof verification. +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IVerifier { + /// @dev Verifies a zk-SNARK proof. + /// @return A boolean value indicating whether the zk-SNARK proof is valid. + /// Note: The function may revert execution instead of returning false in some cases. + function verify(uint256[] calldata _publicInputs, uint256[] calldata _proof) external view returns (bool); + + /// @notice Calculates a keccak256 hash of the runtime loaded verification keys. + /// @return vkHash The keccak256 hash of the loaded verification keys. 
+ function verificationKeyHash() external pure returns (bytes32); +} diff --git a/l2-contracts/foundry.toml b/l2-contracts/foundry.toml new file mode 100644 index 000000000..b369e211f --- /dev/null +++ b/l2-contracts/foundry.toml @@ -0,0 +1,27 @@ +[profile.default] +src = "contracts" +out = "out" +libs = ["lib"] +test = "test/foundry" +solc_version = "0.8.24" +cache_path = "cache-forge" +via_ir = true +evm_version = "paris" +ignored_error_codes = ["missing-receive-ether", "code-size"] +ignored_warnings_from = ["test", "contracts/dev-contracts"] +remappings = [ + "forge-std/=lib/forge-std/src/", + "foundry-test/=test/foundry/", + "@openzeppelin/contracts-v4/=lib/openzeppelin-contracts-v4/contracts/", + "@openzeppelin/contracts-upgradeable-v4/=lib/openzeppelin-contracts-upgradeable-v4/contracts/", + "@matterlabs/zksync-contracts/=lib/@matterlabs/zksync-contracts/", +] +fs_permissions = [ + { access = "read", path = "zkout" }, + { access = "read", path = "../system-contracts/bootloader/build/artifacts" }, + { access = "read", path = "../system-contracts/artifacts-zk/contracts-preprocessed" } +] + +[profile.default.zksync] +enable_eravm_extensions = true +zksolc = "1.5.3" diff --git a/l2-contracts/hardhat.config.ts b/l2-contracts/hardhat.config.ts index c0aaca03e..235930123 100644 --- a/l2-contracts/hardhat.config.ts +++ b/l2-contracts/hardhat.config.ts @@ -12,14 +12,14 @@ if (!process.env.CHAIN_ETH_NETWORK) { export default { zksolc: { - version: "1.3.18", + version: "1.5.3", compilerSource: "binary", settings: { isSystem: true, }, }, solidity: { - version: "0.8.20", + version: "0.8.24", }, defaultNetwork: "localhost", networks: { diff --git a/l2-contracts/lib/@matterlabs b/l2-contracts/lib/@matterlabs new file mode 120000 index 000000000..beffd09fc --- /dev/null +++ b/l2-contracts/lib/@matterlabs @@ -0,0 +1 @@ +../../lib/@matterlabs \ No newline at end of file diff --git a/l2-contracts/lib/forge-std b/l2-contracts/lib/forge-std new file mode 120000 index 
000000000..edce15694 --- /dev/null +++ b/l2-contracts/lib/forge-std @@ -0,0 +1 @@ +../../lib/forge-std \ No newline at end of file diff --git a/l2-contracts/lib/openzeppelin-contracts-upgradeable-v4 b/l2-contracts/lib/openzeppelin-contracts-upgradeable-v4 new file mode 120000 index 000000000..0551b6016 --- /dev/null +++ b/l2-contracts/lib/openzeppelin-contracts-upgradeable-v4 @@ -0,0 +1 @@ +../../lib/openzeppelin-contracts-upgradeable-v4 \ No newline at end of file diff --git a/l2-contracts/lib/openzeppelin-contracts-v4 b/l2-contracts/lib/openzeppelin-contracts-v4 new file mode 120000 index 000000000..693e94537 --- /dev/null +++ b/l2-contracts/lib/openzeppelin-contracts-v4 @@ -0,0 +1 @@ +../../lib/openzeppelin-contracts-v4 \ No newline at end of file diff --git a/l2-contracts/package.json b/l2-contracts/package.json index 891b348a3..3c871ee3b 100644 --- a/l2-contracts/package.json +++ b/l2-contracts/package.json @@ -3,16 +3,17 @@ "version": "0.1.0", "license": "MIT", "devDependencies": { - "@matterlabs/hardhat-zksync-deploy": "^0.6.5", + "@matterlabs/hardhat-zksync-deploy": "^0.7.0", "@matterlabs/hardhat-zksync-solc": "^0.3.15", - "@matterlabs/hardhat-zksync-verify": "^0.2.0", + "@matterlabs/hardhat-zksync-verify": "^0.4.0", + "@matterlabs/zksync-contracts": "^0.6.1", "@nomicfoundation/hardhat-chai-matchers": "^1.0.6", "@nomicfoundation/hardhat-ethers": "^3.0.4", "@nomicfoundation/hardhat-verify": "^1.1.0", "@nomiclabs/hardhat-ethers": "^2.0.0", "@nomiclabs/hardhat-etherscan": "^3.1.7", - "@openzeppelin/contracts": "4.9.5", - "@openzeppelin/contracts-upgradeable": "4.9.5", + "@openzeppelin/contracts-upgradeable-v4": "npm:@openzeppelin/contracts-upgradeable@4.9.5", + "@openzeppelin/contracts-v4": "npm:@openzeppelin/contracts@4.9.5", "@typechain/ethers-v5": "^2.0.0", "@types/chai": "^4.2.21", "@types/chai-as-promised": "^7.1.4", @@ -28,16 +29,18 @@ "ts-node": "^10.1.0", "typechain": "^4.0.0", "typescript": "^5.2.2", - "zksync-web3": "^0.15.4" + "zksync-ethers": 
"^5.9.0" }, "scripts": { "build": "hardhat compile", + "test:foundry": "forge test --zksync --gas-limit 2000000000", "clean": "hardhat clean", "test": "hardhat test", "verify": "hardhat run src/verify.ts", "deploy-testnet-paymaster-through-l1": "ts-node src/deploy-testnet-paymaster-through-l1.ts", "deploy-force-deploy-upgrader-through-l1": "ts-node src/deploy-force-deploy-upgrader-through-l1.ts", "deploy-shared-bridge-on-l2-through-l1": "ts-node src/deploy-shared-bridge-on-l2-through-l1.ts", + "deploy-l2-da-validator-on-l2-through-l1": "ts-node src/deploy-l2-da-validator-on-l2-through-l1.ts", "publish-bridge-preimages": "ts-node src/publish-bridge-preimages.ts", "deploy-l2-weth": "ts-node src/deploy-l2-weth.ts", "upgrade-bridge-contracts": "ts-node src/upgrade-bridge-impl.ts", diff --git a/l2-contracts/src/deploy-l2-da-validator-on-l2-through-l1.ts b/l2-contracts/src/deploy-l2-da-validator-on-l2-through-l1.ts new file mode 100644 index 000000000..a9bfb7eca --- /dev/null +++ b/l2-contracts/src/deploy-l2-da-validator-on-l2-through-l1.ts @@ -0,0 +1,118 @@ +import { Command } from "commander"; +import type { BigNumberish } from "ethers"; +import { ethers, Wallet } from "ethers"; +import { formatUnits, parseUnits } from "ethers/lib/utils"; +import { computeL2Create2Address, create2DeployFromL1, provider, priorityTxMaxGasLimit } from "./utils"; + +import { ethTestConfig } from "./deploy-utils"; + +import { Deployer } from "../../l1-contracts/src.ts/deploy"; +import { GAS_MULTIPLIER } from "../../l1-contracts/scripts/utils"; +import { AdminFacetFactory } from "../../l1-contracts/typechain"; +import * as hre from "hardhat"; + +async function deployContractOnL2ThroughL1( + deployer: Deployer, + name: string, + chainId: string, + gasPrice: BigNumberish +): Promise { + const bytecode = hre.artifacts.readArtifactSync(name).bytecode; + const address = computeL2Create2Address( + deployer.deployWallet, + bytecode, + // Empty constructor data + "0x", + ethers.constants.HashZero + 
); + + const tx = await create2DeployFromL1( + chainId, + deployer.deployWallet, + bytecode, + "0x", + ethers.constants.HashZero, + priorityTxMaxGasLimit, + gasPrice + ); + + await tx.wait(); + + return address; +} + +async function main() { + const program = new Command(); + + program.version("0.1.0").name("deploy-shared-bridge-on-l2-through-l1"); + + program + .option("--private-key ") + .option("--chain-id ") + .option("--local-legacy-bridge-testing") + .option("--gas-price ") + .option("--nonce ") + .option("--erc20-bridge ") + .option("--validium-mode") + .action(async (cmd) => { + const chainId: string = cmd.chainId ? cmd.chainId : process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID; + const deployWallet = cmd.privateKey + ? new Wallet(cmd.privateKey, provider) + : Wallet.fromMnemonic( + process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, + "m/44'/60'/0'/0/1" + ).connect(provider); + console.log(`Using deployer wallet: ${deployWallet.address}`); + + const deployer = new Deployer({ + deployWallet, + ownerAddress: deployWallet.address, + verbose: true, + }); + + const nonce = cmd.nonce ? parseInt(cmd.nonce) : await deployer.deployWallet.getTransactionCount(); + console.log(`Using nonce: ${nonce}`); + + const gasPrice = cmd.gasPrice + ? 
parseUnits(cmd.gasPrice, "gwei") + : (await provider.getGasPrice()).mul(GAS_MULTIPLIER); + console.log(`Using gas price: ${formatUnits(gasPrice, "gwei")} gwei`); + + let l2DaValidatorAddress; + let l1DaValidatorAddress; + if (cmd.validiumMode) { + l2DaValidatorAddress = await deployContractOnL2ThroughL1(deployer, "ValidiumL2DAValidator", chainId, gasPrice); + l1DaValidatorAddress = deployer.addresses.ValidiumL1DAValidator; + } else { + l2DaValidatorAddress = await deployContractOnL2ThroughL1(deployer, "RollupL2DAValidator", chainId, gasPrice); + l1DaValidatorAddress = deployer.addresses.RollupL1DAValidator; + } + + console.log(`CONTRACTS_L1_DA_VALIDATOR_ADDR=${l1DaValidatorAddress}`); + console.log(`CONTRACTS_L2_DA_VALIDATOR_ADDR=${l2DaValidatorAddress}`); + + const adminFacetInterface = new AdminFacetFactory().interface; + + console.log("Setting the DA Validator pair on diamond proxy"); + console.log("Who is called: ", deployer.addresses.StateTransition.DiamondProxy); + await deployer.executeChainAdminMulticall([ + { + target: deployer.addresses.StateTransition.DiamondProxy, + data: adminFacetInterface.encodeFunctionData("setDAValidatorPair", [ + l1DaValidatorAddress, + l2DaValidatorAddress, + ]), + value: 0, + }, + ]); + }); + + await program.parseAsync(process.argv); +} + +main() + .then(() => process.exit(0)) + .catch((err) => { + console.error("Error:", err); + process.exit(1); + }); diff --git a/l2-contracts/src/deploy-shared-bridge-on-l2-through-l1.ts b/l2-contracts/src/deploy-shared-bridge-on-l2-through-l1.ts deleted file mode 100644 index 5bf18af74..000000000 --- a/l2-contracts/src/deploy-shared-bridge-on-l2-through-l1.ts +++ /dev/null @@ -1,274 +0,0 @@ -import { Command } from "commander"; -import type { BigNumberish } from "ethers"; -import { ethers, Wallet } from "ethers"; -import { formatUnits, Interface, parseUnits, defaultAbiCoder } from "ethers/lib/utils"; -import { - computeL2Create2Address, - create2DeployFromL1, - provider, - 
priorityTxMaxGasLimit, - hashL2Bytecode, - applyL1ToL2Alias, - publishBytecodeFromL1, -} from "./utils"; - -import { ethTestConfig } from "./deploy-utils"; - -import { Deployer } from "../../l1-contracts/src.ts/deploy"; -import { GAS_MULTIPLIER } from "../../l1-contracts/scripts/utils"; -import * as hre from "hardhat"; - -export const L2_SHARED_BRIDGE_ABI = hre.artifacts.readArtifactSync("L2SharedBridge").abi; -export const L2_STANDARD_TOKEN_PROXY_BYTECODE = hre.artifacts.readArtifactSync("BeaconProxy").bytecode; - -export async function publishL2SharedBridgeDependencyBytecodesOnL2( - deployer: Deployer, - chainId: string, - gasPrice: BigNumberish -) { - if (deployer.verbose) { - console.log("Providing necessary L2 bytecodes"); - } - - const L2_STANDARD_ERC20_PROXY_FACTORY_BYTECODE = hre.artifacts.readArtifactSync("UpgradeableBeacon").bytecode; - const L2_STANDARD_ERC20_IMPLEMENTATION_BYTECODE = hre.artifacts.readArtifactSync("L2StandardERC20").bytecode; - - await publishBytecodeFromL1( - chainId, - deployer.deployWallet, - [ - L2_STANDARD_ERC20_PROXY_FACTORY_BYTECODE, - L2_STANDARD_ERC20_IMPLEMENTATION_BYTECODE, - L2_STANDARD_TOKEN_PROXY_BYTECODE, - ], - gasPrice - ); - - if (deployer.verbose) { - console.log("Bytecodes published on L2"); - } -} - -export async function deploySharedBridgeImplOnL2ThroughL1( - deployer: Deployer, - chainId: string, - gasPrice: BigNumberish, - localLegacyBridgeTesting: boolean = false -) { - if (deployer.verbose) { - console.log("Deploying L2SharedBridge Implementation"); - } - const eraChainId = process.env.CONTRACTS_ERA_CHAIN_ID; - - const l2SharedBridgeImplementationBytecode = localLegacyBridgeTesting - ? 
hre.artifacts.readArtifactSync("DevL2SharedBridge").bytecode - : hre.artifacts.readArtifactSync("L2SharedBridge").bytecode; - - if (!l2SharedBridgeImplementationBytecode) { - throw new Error("l2SharedBridgeImplementationBytecode not found"); - } - if (deployer.verbose) { - console.log("l2SharedBridgeImplementationBytecode loaded"); - - console.log("Computing L2SharedBridge Implementation Address"); - } - const l2SharedBridgeImplAddress = computeL2Create2Address( - deployer.deployWallet, - l2SharedBridgeImplementationBytecode, - defaultAbiCoder.encode(["uint256"], [eraChainId]), - ethers.constants.HashZero - ); - deployer.addresses.Bridges.L2SharedBridgeImplementation = l2SharedBridgeImplAddress; - if (deployer.verbose) { - console.log(`L2SharedBridge Implementation Address: ${l2SharedBridgeImplAddress}`); - - console.log("Deploying L2SharedBridge Implementation"); - } - - /// L2StandardTokenProxy bytecode. We need this bytecode to be accessible on the L2, it is enough to add to factoryDeps - const L2_STANDARD_TOKEN_PROXY_BYTECODE = hre.artifacts.readArtifactSync("BeaconProxy").bytecode; - - // TODO: request from API how many L2 gas needs for the transaction. 
- const tx2 = await create2DeployFromL1( - chainId, - deployer.deployWallet, - l2SharedBridgeImplementationBytecode, - defaultAbiCoder.encode(["uint256"], [eraChainId]), - ethers.constants.HashZero, - priorityTxMaxGasLimit, - gasPrice, - [L2_STANDARD_TOKEN_PROXY_BYTECODE] - ); - - await tx2.wait(); - if (deployer.verbose) { - console.log("Deployed L2SharedBridge Implementation"); - console.log(`CONTRACTS_L2_SHARED_BRIDGE_IMPL_ADDR=${l2SharedBridgeImplAddress}`); - } -} - -export async function deploySharedBridgeProxyOnL2ThroughL1( - deployer: Deployer, - chainId: string, - gasPrice: BigNumberish, - localLegacyBridgeTesting: boolean = false -) { - const l1SharedBridge = deployer.defaultSharedBridge(deployer.deployWallet); - if (deployer.verbose) { - console.log("Deploying L2SharedBridge Proxy"); - } - /// prepare proxyInitializationParams - const l2GovernorAddress = applyL1ToL2Alias(deployer.addresses.Governance); - - let proxyInitializationParams; - if (localLegacyBridgeTesting) { - const l2SharedBridgeInterface = new Interface(hre.artifacts.readArtifactSync("DevL2SharedBridge").abi); - proxyInitializationParams = l2SharedBridgeInterface.encodeFunctionData("initializeDevBridge", [ - l1SharedBridge.address, - deployer.addresses.Bridges.ERC20BridgeProxy, - hashL2Bytecode(L2_STANDARD_TOKEN_PROXY_BYTECODE), - l2GovernorAddress, - ]); - } else { - const l2SharedBridgeInterface = new Interface(hre.artifacts.readArtifactSync("L2SharedBridge").abi); - proxyInitializationParams = l2SharedBridgeInterface.encodeFunctionData("initialize", [ - l1SharedBridge.address, - deployer.addresses.Bridges.ERC20BridgeProxy, - hashL2Bytecode(L2_STANDARD_TOKEN_PROXY_BYTECODE), - l2GovernorAddress, - ]); - } - - /// prepare constructor data - const l2SharedBridgeProxyConstructorData = ethers.utils.arrayify( - new ethers.utils.AbiCoder().encode( - ["address", "address", "bytes"], - [deployer.addresses.Bridges.L2SharedBridgeImplementation, l2GovernorAddress, proxyInitializationParams] - ) - ); 
- - /// loading TransparentUpgradeableProxy bytecode - const L2_SHARED_BRIDGE_PROXY_BYTECODE = hre.artifacts.readArtifactSync("TransparentUpgradeableProxy").bytecode; - - /// compute L2SharedBridgeProxy address - const l2SharedBridgeProxyAddress = computeL2Create2Address( - deployer.deployWallet, - L2_SHARED_BRIDGE_PROXY_BYTECODE, - l2SharedBridgeProxyConstructorData, - ethers.constants.HashZero - ); - deployer.addresses.Bridges.L2SharedBridgeProxy = l2SharedBridgeProxyAddress; - - /// deploy L2SharedBridgeProxy - // TODO: request from API how many L2 gas needs for the transaction. - const tx3 = await create2DeployFromL1( - chainId, - deployer.deployWallet, - L2_SHARED_BRIDGE_PROXY_BYTECODE, - l2SharedBridgeProxyConstructorData, - ethers.constants.HashZero, - priorityTxMaxGasLimit, - gasPrice - ); - await tx3.wait(); - if (deployer.verbose) { - console.log(`CONTRACTS_L2_SHARED_BRIDGE_ADDR=${l2SharedBridgeProxyAddress}`); - } -} - -export async function initializeChainGovernance(deployer: Deployer, chainId: string) { - const l1SharedBridge = deployer.defaultSharedBridge(deployer.deployWallet); - - if (deployer.verbose) { - console.log("Initializing chain governance"); - } - await deployer.executeUpgrade( - l1SharedBridge.address, - 0, - l1SharedBridge.interface.encodeFunctionData("initializeChainGovernance", [ - chainId, - deployer.addresses.Bridges.L2SharedBridgeProxy, - ]) - ); - - if (deployer.verbose) { - console.log("L2 shared bridge address registered on L1 via governance"); - } -} - -export async function deploySharedBridgeOnL2ThroughL1( - deployer: Deployer, - chainId: string, - gasPrice: BigNumberish, - localLegacyBridgeTesting: boolean, - skipInitializeChainGovernance: boolean -) { - await publishL2SharedBridgeDependencyBytecodesOnL2(deployer, chainId, gasPrice); - await deploySharedBridgeImplOnL2ThroughL1(deployer, chainId, gasPrice, localLegacyBridgeTesting); - await deploySharedBridgeProxyOnL2ThroughL1(deployer, chainId, gasPrice, 
localLegacyBridgeTesting); - if (!skipInitializeChainGovernance) { - await initializeChainGovernance(deployer, chainId); - } -} - -async function main() { - const program = new Command(); - - program.version("0.1.0").name("deploy-shared-bridge-on-l2-through-l1"); - - program - .option("--private-key ") - .option("--chain-id ") - .option("--local-legacy-bridge-testing") - .option("--gas-price ") - .option("--nonce ") - .option("--erc20-bridge ") - .option("--skip-initialize-chain-governance ") - .action(async (cmd) => { - const chainId: string = cmd.chainId ? cmd.chainId : process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID; - const deployWallet = cmd.privateKey - ? new Wallet(cmd.privateKey, provider) - : Wallet.fromMnemonic( - process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic, - "m/44'/60'/0'/0/1" - ).connect(provider); - console.log(`Using deployer wallet: ${deployWallet.address}`); - - const deployer = new Deployer({ - deployWallet, - ownerAddress: deployWallet.address, - verbose: true, - }); - - const nonce = cmd.nonce ? parseInt(cmd.nonce) : await deployer.deployWallet.getTransactionCount(); - console.log(`Using nonce: ${nonce}`); - - const gasPrice = cmd.gasPrice - ? 
parseUnits(cmd.gasPrice, "gwei") - : (await provider.getGasPrice()).mul(GAS_MULTIPLIER); - console.log(`Using gas price: ${formatUnits(gasPrice, "gwei")} gwei`); - - const skipInitializeChainGovernance = - !!cmd.skipInitializeChainGovernance && cmd.skipInitializeChainGovernance === "true"; - if (skipInitializeChainGovernance) { - console.log("Initialization of the chain governance will be skipped"); - } - - await deploySharedBridgeOnL2ThroughL1( - deployer, - chainId, - gasPrice, - cmd.localLegacyBridgeTesting, - skipInitializeChainGovernance - ); - }); - - await program.parseAsync(process.argv); -} - -main() - .then(() => process.exit(0)) - .catch((err) => { - console.error("Error:", err); - process.exit(1); - }); diff --git a/l2-contracts/src/update-l2-erc20-metadata.ts b/l2-contracts/src/update-l2-erc20-metadata.ts index b86589df3..24903acd8 100644 --- a/l2-contracts/src/update-l2-erc20-metadata.ts +++ b/l2-contracts/src/update-l2-erc20-metadata.ts @@ -2,7 +2,7 @@ import * as hre from "hardhat"; import "@nomiclabs/hardhat-ethers"; import { Command } from "commander"; import { Wallet, ethers, BigNumber } from "ethers"; -import { Provider } from "zksync-web3"; +import { Provider } from "zksync-ethers"; import { getNumberFromEnv } from "../../l1-contracts/src.ts/utils"; import { web3Provider } from "../../l1-contracts/scripts/utils"; import { Deployer } from "../../l1-contracts/src.ts/deploy"; diff --git a/l2-contracts/src/upgrade-bridge-impl.ts b/l2-contracts/src/upgrade-bridge-impl.ts index 99c7475c7..3d8e77da9 100644 --- a/l2-contracts/src/upgrade-bridge-impl.ts +++ b/l2-contracts/src/upgrade-bridge-impl.ts @@ -6,8 +6,8 @@ import { Command } from "commander"; import { BigNumber, Wallet, ethers } from "ethers"; import * as fs from "fs"; import * as path from "path"; -import { Provider } from "zksync-web3"; -import { REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT } from "zksync-web3/build/src/utils"; +import { Provider } from "zksync-ethers"; +import { 
REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT } from "zksync-ethers/build/utils"; import { web3Provider } from "../../l1-contracts/scripts/utils"; import { getAddressFromEnv, getNumberFromEnv } from "../../l1-contracts/src.ts/utils"; import { Deployer } from "../../l1-contracts/src.ts/deploy"; @@ -352,7 +352,7 @@ async function main() { REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT ); - console.log(`Base cost for priority tx with max ergs: ${ethers.utils.formatEther(neededValue)} ETH`); + console.log(`Base cost for priority tx with max gas: ${ethers.utils.formatEther(neededValue)} ETH`); }); await program.parseAsync(process.argv); diff --git a/l2-contracts/src/upgrade-consistency-checker.ts b/l2-contracts/src/upgrade-consistency-checker.ts index 8bebe197d..da2ebcc29 100644 --- a/l2-contracts/src/upgrade-consistency-checker.ts +++ b/l2-contracts/src/upgrade-consistency-checker.ts @@ -10,7 +10,7 @@ import { Provider } from "zksync-ethers"; // Things that still have to be manually double checked: // 1. Contracts must be verified. -// 2. Getter methods in STM. +// 2. Getter methods in CTM. 
// List the contracts that should become the upgrade targets const l2BridgeImplAddr = "0x470afaacce2acdaefcc662419b74c79d76c914ae"; diff --git a/l2-contracts/src/utils.ts b/l2-contracts/src/utils.ts index b4e7d5c1e..fc89ba668 100644 --- a/l2-contracts/src/utils.ts +++ b/l2-contracts/src/utils.ts @@ -9,11 +9,11 @@ import { web3Provider } from "../../l1-contracts/scripts/utils"; import type { BigNumber, BytesLike, Wallet } from "ethers"; import { ethers } from "ethers"; -import type { Provider } from "zksync-web3"; -import { REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT, sleep } from "zksync-web3/build/src/utils"; -import { IERC20Factory } from "zksync-web3/build/typechain"; +import type { Provider } from "zksync-ethers"; +import { REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT, sleep } from "zksync-ethers/build/utils"; -import { ERC20Factory } from "../../l1-contracts/typechain"; +import { IERC20Factory } from "../typechain/IERC20Factory"; +import { IL1NativeTokenVaultFactory } from "../../l1-contracts/typechain/IL1NativeTokenVaultFactory"; export const provider = web3Provider(); @@ -103,13 +103,39 @@ export async function create2DeployFromL1( gasPrice?: ethers.BigNumberish, extraFactoryDeps?: ethers.BytesLike[] ) { - const bridgehubAddress = deployedAddressesFromEnv().Bridgehub.BridgehubProxy; - const bridgehub = IBridgehubFactory.connect(bridgehubAddress, wallet); - - const deployerSystemContracts = new Interface(artifacts.readArtifactSync("IContractDeployer").abi); + const deployerSystemContracts = new Interface( + artifacts.readArtifactSync("contracts/L2ContractHelper.sol:IContractDeployer").abi + ); const bytecodeHash = hashL2Bytecode(bytecode); const calldata = deployerSystemContracts.encodeFunctionData("create2", [create2Salt, bytecodeHash, constructor]); + + const factoryDeps = extraFactoryDeps ? 
[bytecode, ...extraFactoryDeps] : [bytecode]; + return await requestL2TransactionDirect( + chainId, + wallet, + DEPLOYER_SYSTEM_CONTRACT_ADDRESS, + calldata, + l2GasLimit, + gasPrice, + factoryDeps + ); +} + +export async function requestL2TransactionDirect( + chainId: ethers.BigNumberish, + wallet: ethers.Wallet, + l2Contract: string, + calldata: string, + l2GasLimit: ethers.BigNumberish, + gasPrice?: ethers.BigNumberish, + factoryDeps?: ethers.BytesLike[] +) { + const deployedAddresses = deployedAddressesFromEnv(); + const bridgehubAddress = deployedAddresses.Bridgehub.BridgehubProxy; + const bridgehub = IBridgehubFactory.connect(bridgehubAddress, wallet); + const ntv = IL1NativeTokenVaultFactory.connect(deployedAddresses.Bridges.NativeTokenVaultProxy, wallet); gasPrice ??= await bridgehub.provider.getGasPrice(); + const expectedCost = await bridgehub.l2TransactionBaseCost( chainId, gasPrice, @@ -117,8 +143,9 @@ export async function create2DeployFromL1( REQUIRED_L2_GAS_PRICE_PER_PUBDATA ); - const baseTokenAddress = await bridgehub.baseToken(chainId); - const baseTokenBridge = deployedAddressesFromEnv().Bridges.SharedBridgeProxy; + const baseTokenAssetId = await bridgehub.baseTokenAssetId(chainId); + const baseTokenAddress = await ntv.tokenAddress(baseTokenAssetId); + const baseTokenBridge = deployedAddresses.Bridges.SharedBridgeProxy; const baseToken = IERC20Factory.connect(baseTokenAddress, wallet); const ethIsBaseToken = ADDRESS_ONE == baseTokenAddress; @@ -126,17 +153,16 @@ export async function create2DeployFromL1( const tx = await baseToken.approve(baseTokenBridge, expectedCost); await tx.wait(); } - const factoryDeps = extraFactoryDeps ? 
[bytecode, ...extraFactoryDeps] : [bytecode]; return await bridgehub.requestL2TransactionDirect( { chainId, - l2Contract: DEPLOYER_SYSTEM_CONTRACT_ADDRESS, + l2Contract: l2Contract, mintValue: expectedCost, l2Value: 0, l2Calldata: calldata, l2GasLimit, l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, - factoryDeps: factoryDeps, + factoryDeps: factoryDeps ?? [], refundRecipient: wallet.address, }, { value: ethIsBaseToken ? expectedCost : 0, gasPrice } @@ -170,44 +196,15 @@ export async function publishBytecodeFromL1( factoryDeps: ethers.BytesLike[], gasPrice?: ethers.BigNumberish ) { - const deployedAddresses = deployedAddressesFromEnv(); - const bridgehubAddress = deployedAddresses.Bridgehub.BridgehubProxy; - const bridgehub = IBridgehubFactory.connect(bridgehubAddress, wallet); - - const requiredValueToPublishBytecodes = await bridgehub.l2TransactionBaseCost( + return await requestL2TransactionDirect( chainId, - gasPrice, + wallet, + ethers.constants.AddressZero, + "0x", priorityTxMaxGasLimit, - REQUIRED_L2_GAS_PRICE_PER_PUBDATA - ); - - const baseToken = deployedAddresses.BaseToken; - const ethIsBaseToken = ADDRESS_ONE == baseToken; - if (!ethIsBaseToken) { - const erc20 = ERC20Factory.connect(baseToken, wallet); - - const approveTx = await erc20.approve( - deployedAddresses.Bridges.SharedBridgeProxy, - requiredValueToPublishBytecodes.add(requiredValueToPublishBytecodes) - ); - await approveTx.wait(1); - } - const nonce = await wallet.getTransactionCount(); - const tx1 = await bridgehub.requestL2TransactionDirect( - { - chainId, - l2Contract: ethers.constants.AddressZero, - mintValue: requiredValueToPublishBytecodes, - l2Value: 0, - l2Calldata: "0x", - l2GasLimit: priorityTxMaxGasLimit, - l2GasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, - factoryDeps: factoryDeps, - refundRecipient: wallet.address, - }, - { gasPrice, nonce, value: ethIsBaseToken ? 
requiredValueToPublishBytecodes : 0 } + gasPrice, + factoryDeps ); - await tx1.wait(); } export async function awaitPriorityOps( @@ -271,3 +268,9 @@ export async function getL1TxInfo( value: neededValue.toString(), }; } + +const LOCAL_NETWORKS = ["localhost", "hardhat", "localhostL2"]; + +export function isCurrentNetworkLocal(): boolean { + return LOCAL_NETWORKS.includes(process.env.CHAIN_ETH_NETWORK); +} diff --git a/l2-contracts/src/verify.ts b/l2-contracts/src/verify.ts index a45c83795..e69e974d9 100644 --- a/l2-contracts/src/verify.ts +++ b/l2-contracts/src/verify.ts @@ -1,5 +1,6 @@ // hardhat import should be the first import in the file import * as hardhat from "hardhat"; +import { isCurrentNetworkLocal } from "./utils"; // eslint-disable-next-line @typescript-eslint/no-explicit-any function verifyPromise(address: string, constructorArguments?: Array, libraries?: object): Promise { @@ -12,7 +13,7 @@ function verifyPromise(address: string, constructorArguments?: Array, libra } async function main() { - if (process.env.CHAIN_ETH_NETWORK == "localhost") { + if (isCurrentNetworkLocal()) { console.log("Skip contract verification on localhost"); return; } diff --git a/l2-contracts/test/erc20.test.ts b/l2-contracts/test/erc20.test.ts deleted file mode 100644 index 25dfba652..000000000 --- a/l2-contracts/test/erc20.test.ts +++ /dev/null @@ -1,148 +0,0 @@ -import { Deployer } from "@matterlabs/hardhat-zksync-deploy"; -import { expect } from "chai"; -import { ethers } from "ethers"; -import * as hre from "hardhat"; -import { Provider, Wallet } from "zksync-web3"; -import { hashBytecode } from "zksync-web3/build/src/utils"; -import { unapplyL1ToL2Alias } from "./test-utils"; -import { L2SharedBridgeFactory, L2StandardERC20Factory } from "../typechain"; -import type { L2SharedBridge, L2StandardERC20 } from "../typechain"; - -const richAccount = [ - { - address: "0x36615Cf349d7F6344891B1e7CA7C72883F5dc049", - privateKey: 
"0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110", - }, - { - address: "0xa61464658AfeAf65CccaaFD3a512b69A83B77618", - privateKey: "0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3", - }, - { - address: "0x0D43eB5B8a47bA8900d84AA36656c92024e9772e", - privateKey: "0xd293c684d884d56f8d6abd64fc76757d3664904e309a0645baf8522ab6366d9e", - }, -]; - -describe("ERC20Bridge", function () { - const provider = new Provider(hre.config.networks.localhost.url); - const deployerWallet = new Wallet(richAccount[0].privateKey, provider); - const governorWallet = new Wallet(richAccount[1].privateKey, provider); - - // We need to emulate a L1->L2 transaction from the L1 bridge to L2 counterpart. - // It is a bit easier to use EOA and it is sufficient for the tests. - const l1BridgeWallet = new Wallet(richAccount[2].privateKey, provider); - - // We won't actually deploy an L1 token in these tests, but we need some address for it. - const L1_TOKEN_ADDRESS = "0x1111000000000000000000000000000000001111"; - - const testChainId = 9; - - let erc20Bridge: L2SharedBridge; - let erc20Token: L2StandardERC20; - - before("Deploy token and bridge", async function () { - const deployer = new Deployer(hre, deployerWallet); - - // While we formally don't need to deploy the token and the beacon proxy, it is a neat way to have the bytecode published - const l2TokenImplAddress = await deployer.deploy(await deployer.loadArtifact("L2StandardERC20")); - const l2Erc20TokenBeacon = await deployer.deploy(await deployer.loadArtifact("UpgradeableBeacon"), [ - l2TokenImplAddress.address, - ]); - await deployer.deploy(await deployer.loadArtifact("BeaconProxy"), [l2Erc20TokenBeacon.address, "0x"]); - - const beaconProxyBytecodeHash = hashBytecode((await deployer.loadArtifact("BeaconProxy")).bytecode); - - const erc20BridgeImpl = await deployer.deploy(await deployer.loadArtifact("L2SharedBridge"), [testChainId]); - const bridgeInitializeData = 
erc20BridgeImpl.interface.encodeFunctionData("initialize", [ - unapplyL1ToL2Alias(l1BridgeWallet.address), - ethers.constants.AddressZero, - beaconProxyBytecodeHash, - governorWallet.address, - ]); - - const erc20BridgeProxy = await deployer.deploy(await deployer.loadArtifact("TransparentUpgradeableProxy"), [ - erc20BridgeImpl.address, - governorWallet.address, - bridgeInitializeData, - ]); - - erc20Bridge = L2SharedBridgeFactory.connect(erc20BridgeProxy.address, deployerWallet); - }); - - it("Should finalize deposit ERC20 deposit", async function () { - const erc20BridgeWithL1Bridge = L2SharedBridgeFactory.connect(erc20Bridge.address, l1BridgeWallet); - - const l1Depositor = ethers.Wallet.createRandom(); - const l2Receiver = ethers.Wallet.createRandom(); - - const tx = await ( - await erc20BridgeWithL1Bridge.finalizeDeposit( - // Depositor and l2Receiver can be any here - l1Depositor.address, - l2Receiver.address, - L1_TOKEN_ADDRESS, - 100, - encodedTokenData("TestToken", "TT", 18) - ) - ).wait(); - - const l2TokenAddress = tx.events.find((event) => event.event === "FinalizeDeposit").args.l2Token; - - // Checking the correctness of the balance: - erc20Token = L2StandardERC20Factory.connect(l2TokenAddress, deployerWallet); - expect(await erc20Token.balanceOf(l2Receiver.address)).to.equal(100); - expect(await erc20Token.totalSupply()).to.equal(100); - expect(await erc20Token.name()).to.equal("TestToken"); - expect(await erc20Token.symbol()).to.equal("TT"); - expect(await erc20Token.decimals()).to.equal(18); - }); - - it("Governance should be able to reinitialize the token", async () => { - const erc20TokenWithGovernor = L2StandardERC20Factory.connect(erc20Token.address, governorWallet); - - await ( - await erc20TokenWithGovernor.reinitializeToken( - { - ignoreName: false, - ignoreSymbol: false, - ignoreDecimals: false, - }, - "TestTokenNewName", - "TTN", - 2 - ) - ).wait(); - - expect(await erc20Token.name()).to.equal("TestTokenNewName"); - expect(await 
erc20Token.symbol()).to.equal("TTN"); - // The decimals should stay the same - expect(await erc20Token.decimals()).to.equal(18); - }); - - it("Governance should not be able to skip initializer versions", async () => { - const erc20TokenWithGovernor = L2StandardERC20Factory.connect(erc20Token.address, governorWallet); - - await expect( - erc20TokenWithGovernor.reinitializeToken( - { - ignoreName: false, - ignoreSymbol: false, - ignoreDecimals: false, - }, - "TestTokenNewName", - "TTN", - 20, - { gasLimit: 10000000 } - ) - ).to.be.reverted; - }); -}); - -function encodedTokenData(name: string, symbol: string, decimals: number) { - const abiCoder = ethers.utils.defaultAbiCoder; - const encodedName = abiCoder.encode(["string"], [name]); - const encodedSymbol = abiCoder.encode(["string"], [symbol]); - const encodedDecimals = abiCoder.encode(["uint8"], [decimals]); - - return abiCoder.encode(["bytes", "bytes", "bytes"], [encodedName, encodedSymbol, encodedDecimals]); -} diff --git a/l2-contracts/test/foundry/unit/data-availability/RollupL2DAValidator.t.sol b/l2-contracts/test/foundry/unit/data-availability/RollupL2DAValidator.t.sol new file mode 100644 index 000000000..5a56e7118 --- /dev/null +++ b/l2-contracts/test/foundry/unit/data-availability/RollupL2DAValidator.t.sol @@ -0,0 +1,154 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; + +import {TestStateDiffComposer} from "./TestStateDiffComposer.sol"; + +import {RollupL2DAValidator} from "contracts/data-availability/RollupL2DAValidator.sol"; +import {STATE_DIFF_ENTRY_SIZE} from "contracts/data-availability/StateDiffL2DAValidator.sol"; +import {ReconstructionMismatch, PubdataField} from "contracts/data-availability/DAErrors.sol"; + +import {COMPRESSOR_CONTRACT, PUBDATA_CHUNK_PUBLISHER} from "contracts/L2ContractHelper.sol"; + +import {console2 as console} from "forge-std/Script.sol"; + +contract RollupL2DAValidatorTest is 
Test { + RollupL2DAValidator internal l2DAValidator; + TestStateDiffComposer internal composer; + + function setUp() public { + l2DAValidator = new RollupL2DAValidator(); + composer = new TestStateDiffComposer(); + + bytes memory emptyArray = new bytes(0); + + // Setting dummy state diffs, so it works fine. + composer.setDummyStateDiffs(1, 0, 64, emptyArray, 0, emptyArray); + + bytes memory verifyCompressedStateDiffsData = abi.encodeCall( + COMPRESSOR_CONTRACT.verifyCompressedStateDiffs, + (0, 64, emptyArray, emptyArray) + ); + vm.mockCall(address(COMPRESSOR_CONTRACT), verifyCompressedStateDiffsData, new bytes(32)); + + bytes memory chunkPubdataToBlobsData = abi.encodeCall( + PUBDATA_CHUNK_PUBLISHER.chunkPubdataToBlobs, + (emptyArray) + ); + vm.mockCall(address(PUBDATA_CHUNK_PUBLISHER), chunkPubdataToBlobsData, new bytes(32)); + } + + function finalizeAndCall(bytes memory revertMessage) internal returns (bytes32) { + bytes32 rollingMessagesHash = composer.correctRollingMessagesHash(); + bytes32 rollingBytecodeHash = composer.correctRollingBytecodesHash(); + bytes memory totalL2ToL1PubdataAndStateDiffs = composer.generateTotalStateDiffsAndPubdata(); + + if (revertMessage.length > 0) { + vm.expectRevert(revertMessage); + } + return + l2DAValidator.validatePubdata( + bytes32(0), + bytes32(0), + rollingMessagesHash, + rollingBytecodeHash, + totalL2ToL1PubdataAndStateDiffs + ); + } + + function test_incorrectChainMessagesHash() public { + composer.appendAMessage("message", true, false); + + bytes memory revertMessage = abi.encodeWithSelector( + ReconstructionMismatch.selector, + PubdataField.MsgHash, + composer.correctRollingMessagesHash(), + composer.currentRollingMessagesHash() + ); + finalizeAndCall(revertMessage); + } + + function test_incorrectChainBytecodeHash() public { + composer.appendBytecode(new bytes(32), true, false); + + bytes memory revertMessage = abi.encodeWithSelector( + ReconstructionMismatch.selector, + PubdataField.Bytecode, + 
composer.correctRollingBytecodesHash(), + composer.currentRollingBytecodesHash() + ); + finalizeAndCall(revertMessage); + } + + function test_incorrectStateDiffVersion() public { + composer.setDummyStateDiffs(2, 0, 64, new bytes(0), 0, new bytes(0)); + + bytes memory revertMessage = abi.encodeWithSelector( + ReconstructionMismatch.selector, + PubdataField.StateDiffCompressionVersion, + bytes32(uint256(1)), + bytes32(uint256(2)) + ); + finalizeAndCall(revertMessage); + } + + function test_nonZeroLeftOver() public { + composer.setDummyStateDiffs(1, 0, 64, new bytes(0), 0, new bytes(32)); + + bytes memory revertMessage = abi.encodeWithSelector( + ReconstructionMismatch.selector, + PubdataField.ExtraData, + bytes32(0), + bytes32(uint256(32)) + ); + finalizeAndCall(revertMessage); + } + + function test_fullCorrectCompression() public { + composer.appendAMessage("message", true, true); + composer.appendBytecode(new bytes(32), true, true); + + uint256 numberOfStateDiffs = 1; + // Just some non-zero array, the structure does not matter here. 
+ bytes memory compressedStateDiffs = new bytes(12); + bytes memory uncompressedStateDiffs = new bytes(STATE_DIFF_ENTRY_SIZE * numberOfStateDiffs); + + composer.setDummyStateDiffs( + 1, + uint24(compressedStateDiffs.length), + 64, + compressedStateDiffs, + uint32(numberOfStateDiffs), + uncompressedStateDiffs + ); + + bytes32 stateDiffsHash = keccak256(uncompressedStateDiffs); + bytes memory verifyCompressedStateDiffsData = abi.encodeCall( + COMPRESSOR_CONTRACT.verifyCompressedStateDiffs, + (numberOfStateDiffs, 64, uncompressedStateDiffs, compressedStateDiffs) + ); + vm.mockCall(address(COMPRESSOR_CONTRACT), verifyCompressedStateDiffsData, abi.encodePacked(stateDiffsHash)); + + bytes memory totalPubdata = composer.getTotalPubdata(); + bytes32 blobHash = keccak256(totalPubdata); + bytes32[] memory blobHashes = new bytes32[](1); + blobHashes[0] = blobHash; + bytes memory chunkPubdataToBlobsData = abi.encodeCall( + PUBDATA_CHUNK_PUBLISHER.chunkPubdataToBlobs, + (totalPubdata) + ); + vm.mockCall(address(PUBDATA_CHUNK_PUBLISHER), chunkPubdataToBlobsData, abi.encode(blobHashes)); + + bytes32 operatorDAHash = finalizeAndCall(new bytes(0)); + + bytes32 expectedOperatorDAHash = keccak256( + abi.encodePacked(stateDiffsHash, keccak256(totalPubdata), uint8(blobHashes.length), blobHashes) + ); + + assertEq(operatorDAHash, expectedOperatorDAHash); + } +} diff --git a/l2-contracts/test/foundry/unit/data-availability/TestStateDiffComposer.sol b/l2-contracts/test/foundry/unit/data-availability/TestStateDiffComposer.sol new file mode 100644 index 000000000..05c639818 --- /dev/null +++ b/l2-contracts/test/foundry/unit/data-availability/TestStateDiffComposer.sol @@ -0,0 +1,97 @@ +// SPDX-License-Identifier: MIT + +import {L2_TO_L1_LOG_SERIALIZE_SIZE} from "contracts/data-availability/StateDiffL2DAValidator.sol"; + +import {L2ContractHelper} from "contracts/L2ContractHelper.sol"; + +/// @notice The contract that is used in testing to compose the pubdata needed for the +/// state diff DA 
validator. +contract TestStateDiffComposer { + // The following two are always correct + // as these qre expected to be already checked by the L1Messenger + uint256 internal logsNumber; + bytes internal logs; + + uint256 internal messagesNumber; + bytes internal messages; + bytes32 public currentRollingMessagesHash; + bytes32 public correctRollingMessagesHash; + + uint256 internal bytecodesNumber; + bytes internal bytecodes; + bytes32 public currentRollingBytecodesHash; + bytes32 public correctRollingBytecodesHash; + + bytes internal uncomressedStateDiffsPart; + bytes internal compressedStateDiffsPart; + + function appendALog() public { + // This function is not fully implemented, i.e. we do not insert the correct + // content of the log. The reason for that is that it is not needed for the + // testing + + ++logsNumber; + logs = bytes.concat(logs, new bytes(L2_TO_L1_LOG_SERIALIZE_SIZE)); + } + + function appendAMessage(bytes memory message, bool includeToArray, bool includeToCorrectHash) public { + if (includeToArray) { + ++messagesNumber; + messages = bytes.concat(messages, bytes4(uint32(message.length)), message); + currentRollingMessagesHash = keccak256(abi.encode(currentRollingMessagesHash, keccak256(message))); + } + + if (includeToCorrectHash) { + correctRollingMessagesHash = keccak256(abi.encode(correctRollingMessagesHash, keccak256(message))); + } + } + + function appendBytecode(bytes memory bytecode, bool includeToArray, bool includeToCorrectHash) public { + if (includeToArray) { + ++bytecodesNumber; + bytecodes = bytes.concat(bytecodes, bytes4(uint32(bytecode.length)), bytecode); + currentRollingBytecodesHash = keccak256( + abi.encode(currentRollingBytecodesHash, L2ContractHelper.hashL2BytecodeMemory(bytecode)) + ); + } + if (includeToCorrectHash) { + correctRollingBytecodesHash = keccak256( + abi.encode(correctRollingBytecodesHash, L2ContractHelper.hashL2BytecodeMemory(bytecode)) + ); + } + } + + function setDummyStateDiffs( + uint8 _version, + uint24 
_compressedStateDiffSize, + uint8 _enumIndexSize, + bytes memory _compressedStateDiffs, + uint32 _numberOfStateDiffs, + bytes memory _stateDiffs + ) public { + compressedStateDiffsPart = abi.encodePacked( + _version, + _compressedStateDiffSize, + _enumIndexSize, + _compressedStateDiffs + ); + + uncomressedStateDiffsPart = abi.encodePacked(_numberOfStateDiffs, _stateDiffs); + } + + function getTotalPubdata() public returns (bytes memory _totalPubdata) { + _totalPubdata = abi.encodePacked( + uint32(logsNumber), + logs, + uint32(messagesNumber), + messages, + uint32(bytecodesNumber), + bytecodes, + compressedStateDiffsPart + ); + } + + function generateTotalStateDiffsAndPubdata() public returns (bytes memory _totalL2ToL1PubdataAndStateDiffs) { + _totalL2ToL1PubdataAndStateDiffs = abi.encodePacked(getTotalPubdata(), uncomressedStateDiffsPart); + } +} diff --git a/l2-contracts/test/foundry/unit/data-availability/ValidiumL2DAValidator.t.sol b/l2-contracts/test/foundry/unit/data-availability/ValidiumL2DAValidator.t.sol new file mode 100644 index 000000000..3374e1acc --- /dev/null +++ b/l2-contracts/test/foundry/unit/data-availability/ValidiumL2DAValidator.t.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.20; + +// solhint-disable gas-custom-errors + +import {Test} from "forge-std/Test.sol"; + +import {ValidiumL2DAValidator} from "contracts/data-availability/ValidiumL2DAValidator.sol"; + +contract L2ValidiumDAValidatorTest is Test { + function test_callValidiumDAValidator(address depositor, address receiver, uint256 amount) internal { + ValidiumL2DAValidator validator = new ValidiumL2DAValidator(); + + bytes32 outputHash = validator.validatePubdata(bytes32(0), bytes32(0), bytes32(0), bytes32(0), hex""); + + assertEq(outputHash, bytes32(0)); + } +} diff --git a/l2-contracts/test/foundry/unit/verifier/Verifier.t.sol b/l2-contracts/test/foundry/unit/verifier/Verifier.t.sol new file mode 100644 index 000000000..39b7ad944 --- /dev/null +++ 
b/l2-contracts/test/foundry/unit/verifier/Verifier.t.sol @@ -0,0 +1,199 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {Test} from "forge-std/Test.sol"; + +import {Script, console2 as console} from "forge-std/Script.sol"; + +import {Verifier} from "contracts/verifier/Verifier.sol"; +import {VerifierTest} from "contracts/dev-contracts/VerifierTest.sol"; + +contract VerifierCaller { + Verifier public verifier; + + constructor(Verifier _verifier) { + verifier = _verifier; + } + + function verify( + uint256[] memory publicInputs, + uint256[] memory serializedProof + ) public view returns (bool result, uint256 gasUsed) { + uint256 gasBefore = gasleft(); + result = verifier.verify(publicInputs, serializedProof); + gasUsed = gasBefore - gasleft(); + } +} + +contract VerifierTestTest is Test { + uint256 Q_MOD = 21888242871839275222246405745257275088696311157297823662689037894645226208583; + uint256 R_MOD = 21888242871839275222246405745257275088548364400416034343698204186575808495617; + + uint256[] public publicInputs; + uint256[] public serializedProof; + + Verifier public verifier; + + function setUp() public virtual { + publicInputs.push(17257057577815541751225964212897374444694342989384539141520877492729); + + serializedProof.push(10032255692304426541958487424837706541667730769782503366592797609781788557424); + serializedProof.push(11856023086316274558845067687080284266010851703055534566998849536424959073766); + serializedProof.push(1946976494418613232642071265529572704802622739887191787991738703483400525159); + serializedProof.push(1328106069458824013351862477593422369726189688844441245167676630500797673929); + serializedProof.push(15488976127650523079605218040232167291115155239002840072043251018873550258833); + serializedProof.push(4352460820258659596860226525221943504756149602617718032378962471842121872064); + serializedProof.push(10499239305859992443759785453270906003243074359959242371675950941500942473773); + 
serializedProof.push(21347231097799123231227724221565041889687686131480556177475242020711996173235); + serializedProof.push(21448274562455512652922184359722637546669181231038098300951155169465175447933); + serializedProof.push(5224615512030263722410009061780530125927659699046094954022444377569738464640); + serializedProof.push(457781538876079938778845275495204146302569607395268192839148474821758081582); + serializedProof.push(18861735728246155975127314860333796285284072325207684293054713266899263027595); + serializedProof.push(16303944945368742900183889655415585360236645961122617249176044814801835577336); + serializedProof.push(13035945439947210396602249585896632733250124877036427100939804737514358838409); + serializedProof.push(5344210729159253547334947774998425118220137275601995670629358314205854915831); + serializedProof.push(5798533246034358556434877465898581616792677631188370022078168611592512620805); + serializedProof.push(17389657286129893116489015409587246992530648956814855147744210777822507444908); + serializedProof.push(2287244647342394712608648573347732257083870498255199596324312699868511383792); + serializedProof.push(4008043766112513713076111464601725311991199944328610186851424132679188418647); + serializedProof.push(1192776719848445147414966176395169615865534126881763324071908049917030138759); + serializedProof.push(21297794452895123333253856666749932934399762330444876027734824957603009458926); + serializedProof.push(17125994169200693606182326100834606153690416627082476471630567824088261322122); + serializedProof.push(13696978282153979214307382954559709118587582183649354744253374201589715565327); + serializedProof.push(19885518441500677676836488338931187143852666523909650686513498826535451677070); + serializedProof.push(1205434280320863211046275554464591162919269140938371417889032165323835178587); + serializedProof.push(17633172995805911347980792921300006225132501482343225088847242025756974009163); + 
serializedProof.push(16438080406761371143473961144300947125022788905488819913014533292593141026205); + serializedProof.push(5069081552536259237104332491140391551180511112980430307676595350165020188468); + serializedProof.push(21217317205917200275887696442048162383709998732382676029165079037795626916156); + serializedProof.push(19474466610515117278975027596198570980840609656738255347763182823792179771539); + serializedProof.push(9744176601826774967534277982058590459006781888895542911226406188087317156914); + serializedProof.push(13171230402193025939763214267878900142876558410430734782028402821166810894141); + serializedProof.push(11775403006142607980192261369108550982244126464568678337528680604943636677964); + serializedProof.push(6903612341636669639883555213872265187697278660090786759295896380793937349335); + serializedProof.push(10197105415769290664169006387603164525075746474380469980600306405504981186043); + serializedProof.push(10143152486514437388737642096964118742712576889537781270260677795662183637771); + serializedProof.push(7662095231333811948165764727904932118187491073896301295018543320499906824310); + serializedProof.push(929422796511992741418500336817719055655694499787310043166783539202506987065); + serializedProof.push(13837024938095280064325737989251964639823205065380219552242839155123572433059); + serializedProof.push(11738888513780631372636453609299803548810759208935038785934252961078387526204); + serializedProof.push(16528875312985292109940444015943812939751717229020635856725059316776921546668); + serializedProof.push(17525167117689648878398809303253004706004801107861280044640132822626802938868); + serializedProof.push(7419167499813234488108910149511390953153207250610705609008080038658070088540); + serializedProof.push(11628425014048216611195735618191126626331446742771562481735017471681943914146); + + verifier = new VerifierTest(); + } + + function testShouldVerify() public view { + bool success = verifier.verify(publicInputs, serializedProof); + 
assert(success); + } + + function testShouldVerifyWithGas() public { + // `gas snapshot` does not work well with zksync setup, so in order to obtain the amount of + // zkevm gas consumed we do the following: + // - Deploy a VerifierCaller contract, which would execute in zkevm context + // - Call the verify function from the VerifierCaller contract and return the gas used + + VerifierCaller caller = new VerifierCaller(verifier); + (bool success, uint256 gasUsed) = caller.verify(publicInputs, serializedProof); + assert(success); + + console.log("Gas used: %d", gasUsed); + } + + function testShouldVerifyWithDirtyBits() public view { + uint256[] memory newPublicInputs = publicInputs; + newPublicInputs[0] += uint256(bytes32(0xe000000000000000000000000000000000000000000000000000000000000000)); + + bool success = verifier.verify(newPublicInputs, serializedProof); + assert(success); + } + + function testEllipticCurvePointsOverModulo() public view { + uint256[] memory newSerializedProof = serializedProof; + newSerializedProof[0] += Q_MOD; + newSerializedProof[1] += Q_MOD; + newSerializedProof[1] += Q_MOD; + + bool success = verifier.verify(publicInputs, newSerializedProof); + assert(success); + } + + function testFrOverModulo() public view { + uint256[] memory newSerializedProof = serializedProof; + newSerializedProof[22] += R_MOD; + + bool success = verifier.verify(publicInputs, newSerializedProof); + assert(success); + } + + function testMoreThanOnePublicInput_shouldRevert() public { + uint256[] memory newPublicInputs = new uint256[](2); + newPublicInputs[0] = publicInputs[0]; + newPublicInputs[1] = publicInputs[0]; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(newPublicInputs, serializedProof); + } + + function testEmptyPublicInput_shouldRevert() public { + uint256[] memory newPublicInputs; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(newPublicInputs, serializedProof); + } + + function 
testMoreThan44WordsProof_shouldRevert() public { + uint256[] memory newSerializedProof = new uint256[](serializedProof.length + 1); + + for (uint256 i = 0; i < serializedProof.length; i++) { + newSerializedProof[i] = serializedProof[i]; + } + newSerializedProof[newSerializedProof.length - 1] = serializedProof[serializedProof.length - 1]; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testEmptyProof_shouldRevert() public { + uint256[] memory newSerializedProof; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testLongerProofInput_shouldRevert() public { + uint256[] memory newSerializedProof = new uint256[](serializedProof.length + 1); + for (uint256 i = 0; i < serializedProof.length; i++) { + newSerializedProof[i] = serializedProof[i]; + } + newSerializedProof[newSerializedProof.length - 1] = publicInputs[0]; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testEllipticCurvePointAtInfinity_shouldRevert() public { + uint256[] memory newSerializedProof = serializedProof; + newSerializedProof[0] = 0; + newSerializedProof[1] = 0; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testInvalidPublicInput_shouldRevert() public { + uint256[] memory newPublicInputs = publicInputs; + newPublicInputs[0] = 0; + + vm.expectRevert(bytes("invalid quotient evaluation")); + verifier.verify(newPublicInputs, serializedProof); + } + + function testVerificationKeyHash() public virtual { + bytes32 verificationKeyHash = verifier.verificationKeyHash(); + assertEq(verificationKeyHash, 0x6625fa96781746787b58306d414b1e25bd706d37d883a9b3acf57b2bd5e0de52); + } +} diff --git a/l2-contracts/test/foundry/unit/verifier/VerifierRecursive.t.sol 
b/l2-contracts/test/foundry/unit/verifier/VerifierRecursive.t.sol new file mode 100644 index 000000000..df43a07ed --- /dev/null +++ b/l2-contracts/test/foundry/unit/verifier/VerifierRecursive.t.sol @@ -0,0 +1,56 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.24; + +import {VerifierTestTest} from "./Verifier.t.sol"; +import {VerifierRecursiveTest} from "contracts/dev-contracts/VerifierRecursiveTest.sol"; + +contract VerifierRecursiveTestTest is VerifierTestTest { + function setUp() public override { + super.setUp(); + + serializedProof.push(2257920826825449939414463854743099397427742128922725774525544832270890253504); + serializedProof.push(9091218701914748532331969127001446391756173432977615061129552313204917562530); + serializedProof.push(16188304989094043810949359833767911976672882599560690320245309499206765021563); + serializedProof.push(3201093556796962656759050531176732990872300033146738631772984017549903765305); + + verifier = new VerifierRecursiveTest(); + } + + function testMoreThan4WordsRecursiveInput_shouldRevert() public { + uint256[] memory newSerializedProof = new uint256[](serializedProof.length + 1); + + for (uint256 i = 0; i < serializedProof.length; i++) { + newSerializedProof[i] = serializedProof[i]; + } + newSerializedProof[newSerializedProof.length - 1] = serializedProof[serializedProof.length - 1]; + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testEmptyRecursiveInput_shouldRevert() public { + uint256[] memory newSerializedProof = new uint256[](serializedProof.length - 4); + for (uint256 i = 0; i < newSerializedProof.length; i++) { + newSerializedProof[i] = serializedProof[i]; + } + + vm.expectRevert(bytes("loadProof: Proof is invalid")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testInvalidRecursiveInput_shouldRevert() public { + uint256[] memory newSerializedProof = serializedProof; + 
newSerializedProof[newSerializedProof.length - 4] = 1; + newSerializedProof[newSerializedProof.length - 3] = 2; + newSerializedProof[newSerializedProof.length - 2] = 1; + newSerializedProof[newSerializedProof.length - 1] = 2; + + vm.expectRevert(bytes("finalPairing: pairing failure")); + verifier.verify(publicInputs, newSerializedProof); + } + + function testVerificationKeyHash() public override { + bytes32 verificationKeyHash = verifier.verificationKeyHash(); + assertEq(verificationKeyHash, 0x88b3ddc4ed85974c7e14297dcad4097169440305c05fdb6441ca8dfd77cd7fa7); + } +} diff --git a/l2-contracts/test/test-utils.ts b/l2-contracts/test/test-utils.ts deleted file mode 100644 index c62d76c11..000000000 --- a/l2-contracts/test/test-utils.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { ethers } from "ethers"; - -const L1_TO_L2_ALIAS_OFFSET = "0x1111000000000000000000000000000000001111"; -const ADDRESS_MODULO = ethers.BigNumber.from(2).pow(160); - -export function unapplyL1ToL2Alias(address: string): string { - // We still add ADDRESS_MODULO to avoid negative numbers - return ethers.utils.hexlify( - ethers.BigNumber.from(address).sub(L1_TO_L2_ALIAS_OFFSET).add(ADDRESS_MODULO).mod(ADDRESS_MODULO) - ); -} diff --git a/l2-contracts/test/weth.test.ts b/l2-contracts/test/weth.test.ts deleted file mode 100644 index 00bb921a0..000000000 --- a/l2-contracts/test/weth.test.ts +++ /dev/null @@ -1,125 +0,0 @@ -import { Deployer } from "@matterlabs/hardhat-zksync-deploy"; -import { expect } from "chai"; -import { ethers } from "ethers"; -import * as hre from "hardhat"; -import { Provider, Wallet } from "zksync-web3"; -import type { L2WrappedBaseToken } from "../typechain/L2WrappedBaseToken"; -import type { L2SharedBridge } from "../typechain/L2SharedBridge"; -import { L2SharedBridgeFactory } from "../typechain/L2SharedBridgeFactory"; -import { L2WrappedBaseTokenFactory } from "../typechain/L2WrappedBaseTokenFactory"; - -const richAccount = { - address: 
"0x36615Cf349d7F6344891B1e7CA7C72883F5dc049", - privateKey: "0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110", -}; - -const eth18 = ethers.utils.parseEther("18"); -const testChainId = 9; - -describe("WETH token & WETH bridge", function () { - const provider = new Provider(hre.config.networks.localhost.url); - const wallet = new Wallet(richAccount.privateKey, provider); - let wethToken: L2WrappedBaseToken; - let wethBridge: L2SharedBridge; - - before("Deploy token and bridge", async function () { - const deployer = new Deployer(hre, wallet); - const wethTokenImpl = await deployer.deploy(await deployer.loadArtifact("L2WrappedBaseToken")); - const wethBridgeImpl = await deployer.deploy(await deployer.loadArtifact("L2SharedBridge"), [testChainId]); - const randomAddress = ethers.utils.hexlify(ethers.utils.randomBytes(20)); - - const wethTokenProxy = await deployer.deploy(await deployer.loadArtifact("TransparentUpgradeableProxy"), [ - wethTokenImpl.address, - randomAddress, - "0x", - ]); - const wethBridgeProxy = await deployer.deploy(await deployer.loadArtifact("TransparentUpgradeableProxy"), [ - wethBridgeImpl.address, - randomAddress, - "0x", - ]); - - wethToken = L2WrappedBaseTokenFactory.connect(wethTokenProxy.address, wallet); - wethBridge = L2SharedBridgeFactory.connect(wethBridgeProxy.address, wallet); - - // await wethToken.initialize(); - await wethToken.initializeV2("Wrapped Ether", "WETH", wethBridge.address, randomAddress); - - // await wethBridge.initialize(randomAddress, randomAddress, wethToken.address); - }); - - it("Should deposit WETH by calling deposit()", async function () { - await wethToken.deposit({ value: eth18 }).then((tx) => tx.wait()); - expect(await wethToken.balanceOf(wallet.address)).to.equal(eth18); - }); - - it("Should deposit WETH by sending", async function () { - await wallet - .sendTransaction({ - to: wethToken.address, - value: eth18, - }) - .then((tx) => tx.wait()); - expect(await 
wethToken.balanceOf(wallet.address)).to.equal(eth18.mul(2)); - }); - - it("Should fail depositing with random calldata", async function () { - await expect( - wallet.sendTransaction({ - data: ethers.utils.randomBytes(36), - to: wethToken.address, - value: eth18, - gasLimit: 100_000, - }) - ).to.be.reverted; - }); - - it("Should withdraw WETH to L2 ETH", async function () { - await wethToken.withdraw(eth18).then((tx) => tx.wait()); - expect(await wethToken.balanceOf(wallet.address)).to.equal(eth18); - }); - - // bridging not supported - // it("Should withdraw WETH to L1 ETH", async function () { - // await expect(wethBridge.withdraw(wallet.address, wethToken.address, eth18.div(2))) - // .to.emit(wethBridge, "WithdrawalInitiated") - // .and.to.emit(wethToken, "BridgeBurn"); - // expect(await wethToken.balanceOf(wallet.address)).to.equal(eth18.div(2)); - // }); - - it("Should deposit WETH to another account", async function () { - const anotherWallet = new Wallet(ethers.utils.randomBytes(32), provider); - await wethToken.depositTo(anotherWallet.address, { value: eth18 }).then((tx) => tx.wait()); - expect(await wethToken.balanceOf(anotherWallet.address)).to.equal(eth18); - }); - - it("Should withdraw WETH to another account", async function () { - const anotherWallet = new Wallet(ethers.utils.randomBytes(32), provider); - await wethToken.withdrawTo(anotherWallet.address, eth18.div(2)).then((tx) => tx.wait()); - expect(await anotherWallet.getBalance()).to.equal(eth18.div(2)); - expect(await wethToken.balanceOf(wallet.address)).to.equal(eth18.div(2)); - }); - - it("Should fail withdrawing with insufficient balance", async function () { - await expect(wethToken.withdraw(1, { gasLimit: 100_000 })).to.be.reverted; - }); - - // bridging not supported - // it("Should fail depositing directly to WETH bridge", async function () { - // await expect( - // wallet.sendTransaction({ - // to: wethBridge.address, - // value: eth18, - // gasLimit: 100_000, - // }) - // 
).to.be.reverted; - // }); - - it("Should fail calling bridgeMint()", async function () { - await expect(await wethToken.bridgeMint(wallet.address, eth18, { gasLimit: 1_000_000 })).to.be.reverted; - }); - - it("Should fail calling bridgeBurn() directly", async function () { - await expect(wethToken.bridgeBurn(wallet.address, eth18, { gasLimit: 100_000 })).to.be.reverted; - }); -}); diff --git a/lib/@matterlabs/zksync-contracts b/lib/@matterlabs/zksync-contracts new file mode 160000 index 000000000..b8449bf9c --- /dev/null +++ b/lib/@matterlabs/zksync-contracts @@ -0,0 +1 @@ +Subproject commit b8449bf9c819098cc8bfee0549ff5094456be51d diff --git a/lib/forge-std b/lib/forge-std new file mode 160000 index 000000000..bf6606142 --- /dev/null +++ b/lib/forge-std @@ -0,0 +1 @@ +Subproject commit bf6606142994b1e47e2882ce0cd477c020d77623 diff --git a/lib/murky b/lib/murky new file mode 160000 index 000000000..5feccd125 --- /dev/null +++ b/lib/murky @@ -0,0 +1 @@ +Subproject commit 5feccd1253d7da820f7cccccdedf64471025455d diff --git a/l1-contracts/lib/openzeppelin-contracts-upgradeable b/lib/openzeppelin-contracts-upgradeable-v4 similarity index 100% rename from l1-contracts/lib/openzeppelin-contracts-upgradeable rename to lib/openzeppelin-contracts-upgradeable-v4 diff --git a/lib/openzeppelin-contracts-v4 b/lib/openzeppelin-contracts-v4 new file mode 160000 index 000000000..dc44c9f1a --- /dev/null +++ b/lib/openzeppelin-contracts-v4 @@ -0,0 +1 @@ +Subproject commit dc44c9f1a4c3b10af99492eed84f83ed244203f6 diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..f0578a39b --- /dev/null +++ b/package-lock.json @@ -0,0 +1,4825 @@ +{ + "name": "era-contracts", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "era-contracts", + "version": "0.1.0", + "devDependencies": { + "@matterlabs/eslint-config-typescript": "^1.1.2", + "@matterlabs/prettier-config": "^1.0.3", + 
"@typescript-eslint/eslint-plugin": "^6.7.4", + "@typescript-eslint/parser": "^6.7.4", + "eslint": "^8.51.0", + "eslint-import-resolver-typescript": "^3.6.1", + "eslint-plugin-import": "^2.29.0", + "eslint-plugin-prettier": "^5.0.1", + "markdownlint-cli": "^0.33.0", + "prettier": "^3.0.3", + "prettier-plugin-solidity": "^1.1.3", + "solhint": "4.5.4" + }, + "workspaces": { + "packages": [ + "l1-contracts", + "l2-contracts", + "da-contracts", + "system-contracts", + "gas-bound-caller" + ], + "nohoist": [ + "**/@openzeppelin/**" + ] + } + }, + "../../DoesItMatter": {}, + "l1-contracts": { + "version": "0.1.0", + "extraneous": true, + "license": "MIT", + "dependencies": { + "@openzeppelin/contracts-v4": "*", + "dotenv": "^16.0.3", + "yarn": "*" + }, + "devDependencies": { + "@nomiclabs/hardhat-ethers": "^2.0.0", + "@nomiclabs/hardhat-etherscan": "^3.1.0", + "@nomiclabs/hardhat-solpp": "^2.0.0", + "@nomiclabs/hardhat-waffle": "^2.0.0", + "@openzeppelin/contracts": "^4.9.5", + "@openzeppelin/contracts-upgradeable": "4.9.5", + "@typechain/ethers-v5": "^2.0.0", + "@types/argparse": "^1.0.36", + "@types/chai": "^4.2.21", + "@types/chai-as-promised": "^7.1.4", + "@types/mocha": "^8.2.3", + "argparse": "^1.0.10", + "axios": "^0.21.1", + "chai": "^4.3.10", + "chai-as-promised": "^7.1.1", + "chalk": "^4.1.0", + "collections": "^5.1.12", + "commander": "^8.3.0", + "eslint": "^8.51.0", + "eslint-import-resolver-typescript": "^3.6.1", + "eslint-plugin-import": "^2.29.0", + "eslint-plugin-prettier": "^5.0.1", + "ethereum-waffle": "^4.0.10", + "ethereumjs-abi": "^0.6.8", + "ethers": "^5.7.0", + "ethjs": "^0.4.0", + "fs": "^0.0.1-security", + "handlebars": "^4.7.6", + "hardhat": "^2.18.3", + "hardhat-contract-sizer": "^2.0.2", + "hardhat-gas-reporter": "^1.0.9", + "hardhat-typechain": "^0.3.3", + "jsonwebtoken": "^8.5.1", + "markdownlint-cli": "^0.33.0", + "merkletreejs": "^0.3.11", + "mocha": "^9.0.2", + "path": "^0.12.7", + "querystring": "^0.2.0", + "solc": "0.8.17", + "solhint": 
"^3.6.2", + "solidity-coverage": "^0.8.5", + "ts-generator": "^0.1.1", + "ts-node": "^10.1.0", + "typechain": "^4.0.0", + "typescript": "^4.6.4" + }, + "optionalDependencies": { + "zksync-web3": "^0.14.3" + } + }, + "l2-contracts": { + "version": "0.1.0", + "extraneous": true, + "license": "MIT", + "dependencies": { + "dotenv": "^16.0.3" + }, + "devDependencies": { + "@matterlabs/hardhat-zksync-deploy": "^0.6.5", + "@matterlabs/hardhat-zksync-solc": "^0.3.15", + "@matterlabs/hardhat-zksync-verify": "^0.2.0", + "@nomicfoundation/hardhat-chai-matchers": "^1.0.6", + "@nomicfoundation/hardhat-ethers": "^3.0.4", + "@nomicfoundation/hardhat-verify": "^1.1.0", + "@nomiclabs/hardhat-ethers": "^2.0.0", + "@nomiclabs/hardhat-etherscan": "^3.1.7", + "@nomiclabs/hardhat-solpp": "^2.0.0", + "@openzeppelin/contracts": "4.9.5", + "@openzeppelin/contracts-upgradeable": "4.9.5", + "@typechain/ethers-v5": "^2.0.0", + "@types/chai": "^4.2.21", + "@types/chai-as-promised": "^7.1.4", + "@types/mocha": "^8.2.3", + "chai": "^4.3.10", + "chai-as-promised": "^7.1.1", + "chalk": "^4.1.0", + "commander": "^6.0.0", + "ethers": "^5.7.0", + "hardhat": "^2.18.3", + "hardhat-typechain": "^0.3.3", + "mocha": "^9.0.2", + "ts-node": "^10.1.0", + "typechain": "^4.0.0", + "typescript": "^5.2.2", + "zksync-web3": "^0.15.4" + } + }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.22.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", + "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.22.13", + "chalk": 
"^2.4.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/code-frame/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/code-frame/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/code-frame/node_modules/has-flag": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", + "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", + "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.22.20", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": 
"sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + 
"node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.3.tgz", + "integrity": "sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "8.53.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.53.0.tgz", + "integrity": "sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.13", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz", + 
"integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz", + "integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==", + "dev": true + }, + "node_modules/@matterlabs/eslint-config-typescript": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@matterlabs/eslint-config-typescript/-/eslint-config-typescript-1.1.2.tgz", + "integrity": "sha512-AhiWJQr+MSE3RVfgp5XwGoMK7kNSKh6a18+T7hkNJtyycP0306I6IGmuFA5ZVbcakGb+K32fQWzepSkrNCTAGg==", + "dev": true, + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "^6.5.0", + "@typescript-eslint/parser": "^6.5.0", + "eslint": "^8.48.0", + "eslint-import-resolver-typescript": "^3.6.1", + "eslint-plugin-import": "^2.28.1", + "eslint-plugin-prettier": "^5.0.0", + "prettier": "^3.0.3" + } + }, + "node_modules/@matterlabs/prettier-config": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@matterlabs/prettier-config/-/prettier-config-1.0.3.tgz", + "integrity": "sha512-JW7nHREPqEtjBWz3EfxLarkmJBD8vi7Kx/1AQ6eBZnz12eHc1VkOyrc6mpR5ogTf0dOUNXFAfZut+cDe2dn4kQ==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": 
"2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgr/utils": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.4.2.tgz", + "integrity": "sha512-POgTXhjrTfbTV63DiFXav4lBHiICLKKwDeaKn9Nphwj7WH6m0hMMCaJkMyRWjgtPFyRKRVoMXXjczsTQRDEhYw==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "fast-glob": "^3.3.0", + "is-glob": "^4.0.3", + "open": "^9.1.0", + "picocolors": "^1.0.0", + "tslib": "^2.6.0" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/@pkgr/utils/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "dev": true + }, + "node_modules/@pnpm/config.env-replace": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz", + "integrity": "sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w==", + "dev": true, + "engines": { + "node": ">=12.22.0" + } + }, + "node_modules/@pnpm/network.ca-file": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz", + "integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==", + "dev": true, + "dependencies": { + "graceful-fs": "4.2.10" + }, + "engines": { + "node": ">=12.22.0" + } + }, + "node_modules/@pnpm/npm-conf": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-2.3.1.tgz", + "integrity": "sha512-c83qWb22rNRuB0UaVCI0uRPNRr8Z0FWnEIvT47jiHAmOIUHbBOg5XvV7pM5x+rKn9HRpjxquDbXYSXr3fAKFcw==", + "dev": true, + "dependencies": { + "@pnpm/config.env-replace": "^1.1.0", + "@pnpm/network.ca-file": "^1.0.1", + "config-chain": "^1.1.11" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@sindresorhus/is": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.6.0.tgz", + "integrity": "sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/@solidity-parser/parser": { + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.16.2.tgz", + "integrity": "sha512-PI9NfoA3P8XK2VBkK5oIfRgKDsicwDZfkVq9ZTBCQYGOP1N2owgY2dyLGyU5/J/hQs8KRk55kdmvTLjy3Mu3vg==", + "dev": true, + "dependencies": { + "antlr4ts": "^0.5.0-alpha.4" + } + }, + "node_modules/@szmarczak/http-timer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", + "integrity": 
"sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", + "dev": true, + "dependencies": { + "defer-to-connect": "^2.0.1" + }, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/@types/http-cache-semantics": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", + "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", + "dev": true + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true + }, + "node_modules/@types/semver": { + "version": "7.5.5", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.5.tgz", + "integrity": "sha512-+d+WYC1BxJ6yVOgUgzK8gWvp5qF8ssV5r4nsDcZWKRWcDQLQ619tvWAxJQYGgBrO1MnLJC7a5GtiYsAoQ47dJg==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.11.0.tgz", + "integrity": "sha512-uXnpZDc4VRjY4iuypDBKzW1rz9T5YBBK0snMn8MaTSNd2kMlj50LnLBABELjJiOL5YHk7ZD8hbSpI9ubzqYI0w==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.5.1", + "@typescript-eslint/scope-manager": "6.11.0", + "@typescript-eslint/type-utils": "6.11.0", + "@typescript-eslint/utils": "6.11.0", + "@typescript-eslint/visitor-keys": "6.11.0", + "debug": "^4.3.4", + "graphemer": "^1.4.0", + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", + "semver": 
"^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@typescript-eslint/parser": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.11.0.tgz", + "integrity": "sha512-+whEdjk+d5do5nxfxx73oanLL9ghKO3EwM9kBCkUtWMRwWuPaFv9ScuqlYfQ6pAD6ZiJhky7TZ2ZYhrMsfMxVQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "6.11.0", + "@typescript-eslint/types": "6.11.0", + "@typescript-eslint/typescript-estree": "6.11.0", + "@typescript-eslint/visitor-keys": 
"6.11.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.11.0.tgz", + "integrity": "sha512-0A8KoVvIURG4uhxAdjSaxy8RdRE//HztaZdG8KiHLP8WOXSk0vlF7Pvogv+vlJA5Rnjj/wDcFENvDaHb+gKd1A==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.11.0", + "@typescript-eslint/visitor-keys": "6.11.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.11.0.tgz", + "integrity": "sha512-nA4IOXwZtqBjIoYrJcYxLRO+F9ri+leVGoJcMW1uqr4r1Hq7vW5cyWrA43lFbpRvQ9XgNrnfLpIkO3i1emDBIA==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "6.11.0", + "@typescript-eslint/utils": "6.11.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.11.0.tgz", + "integrity": "sha512-ZbEzuD4DwEJxwPqhv3QULlRj8KYTAnNsXxmfuUXFCxZmO6CF2gM/y+ugBSAQhrqaJL3M+oe4owdWunaHM6beqA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" 
+ }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.11.0.tgz", + "integrity": "sha512-Aezzv1o2tWJwvZhedzvD5Yv7+Lpu1by/U1LZ5gLc4tCx8jUmuSCMioPFRjliN/6SJIvY6HpTtJIWubKuYYYesQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.11.0", + "@typescript-eslint/visitor-keys": "6.11.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@typescript-eslint/utils": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.11.0.tgz", + "integrity": "sha512-p23ibf68fxoZy605dc0dQAEoUsoiNoP3MD9WQGiHLDuTSOuqoTsa4oAy+h3KDkTcxbbfOtUjb9h3Ta0gT4ug2g==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "6.11.0", + "@typescript-eslint/types": "6.11.0", + "@typescript-eslint/typescript-estree": "6.11.0", + "semver": "^7.5.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + 
"dev": true + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.11.0.tgz", + "integrity": "sha512-+SUN/W7WjBr05uRxPggJPSzyB8zUpaYo2hByKasWbqr3PM8AXfZt8UHdNpBS1v9SA62qnSSMF3380SwDqqprgQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.11.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, + "node_modules/acorn": { + "version": "8.11.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz", + "integrity": "sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/antlr4": { + "version": "4.13.1", + "resolved": "https://registry.npmjs.org/antlr4/-/antlr4-4.13.1.tgz", + "integrity": "sha512-kiXTspaRYvnIArgE97z5YVVf/cDVQABr3abFRR6mE7yesLMkgu4ujuyV/sgxafQ8wgve0DJQUJ38Z8tkgA2izA==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/antlr4ts": { + "version": "0.5.0-alpha.4", + "resolved": "https://registry.npmjs.org/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz", + "integrity": "sha512-WPQDt1B74OfPv/IMS2ekXAKkTZIHl88uMetg6q3OTqgFxZ/dxDXI0EWLyZid/1Pe6hTftyg5N7gel5wNAGxXyQ==", + "dev": true + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", + "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "is-array-buffer": "^3.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-includes": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", + "integrity": 
"sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", + "is-string": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", + "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0", + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", + "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.2", + "resolved": 
"https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", + "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", + "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.0", + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", + "is-array-buffer": "^3.0.2", + "is-shared-array-buffer": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ast-parents": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/ast-parents/-/ast-parents-0.0.1.tgz", + "integrity": "sha512-XHusKxKz3zoYk1ic8Un640joHbFMhbqneyoZfoKnEGtf2ey9Uh/IdpcQplODdO/kENaMIWsD0nJm4+wX3UNLHA==", + "dev": true + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + 
"dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/big-integer": { + "version": "1.6.51", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", + "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==", + "dev": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/bplist-parser": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", + "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", + "dev": true, + "dependencies": { + "big-integer": "^1.6.44" + }, + "engines": { + "node": ">= 5.10.0" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/bundle-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", + "integrity": 
"sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", + "dev": true, + "dependencies": { + "run-applescript": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cacheable-lookup": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", + "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", + "dev": true, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/cacheable-request": { + "version": "10.2.14", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-10.2.14.tgz", + "integrity": "sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==", + "dev": true, + "dependencies": { + "@types/http-cache-semantics": "^4.0.2", + "get-stream": "^6.0.1", + "http-cache-semantics": "^4.1.1", + "keyv": "^4.5.3", + "mimic-response": "^4.0.0", + "normalize-url": "^8.0.0", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" 
+ } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/config-chain": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", + "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", + "dev": true, + "dependencies": { + "ini": "^1.3.4", + "proto-list": "~1.2.1" + } + }, + "node_modules/config-chain/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + }, + "node_modules/cosmiconfig": { + "version": 
"8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", + "dev": true, + "dependencies": { + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/da-contracts": { + "resolved": "../../DoesItMatter", + "link": true + }, + "node_modules/debug": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/default-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz", + "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==", + "dev": true, + "dependencies": { + "bundle-name": "^3.0.0", + "default-browser-id": "^3.0.0", + "execa": "^7.1.1", + "titleize": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz", + "integrity": "sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==", + "dev": true, + "dependencies": { + "bplist-parser": "^0.2.0", + "untildify": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": 
{ + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/enhanced-resolve": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz", + "integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/entities": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", + "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-abstract": { + "version": "1.22.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", + "integrity": 
"sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.0", + "arraybuffer.prototype.slice": "^1.0.2", + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.5", + "es-set-tostringtag": "^2.0.1", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.2", + "get-symbol-description": "^1.0.0", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0", + "internal-slot": "^1.0.5", + "is-array-buffer": "^3.0.2", + "is-callable": "^1.2.7", + "is-negative-zero": "^2.0.2", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.12", + "is-weakref": "^1.0.2", + "object-inspect": "^1.13.1", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.5.1", + "safe-array-concat": "^1.0.1", + "safe-regex-test": "^1.0.0", + "string.prototype.trim": "^1.2.8", + "string.prototype.trimend": "^1.0.7", + "string.prototype.trimstart": "^1.0.7", + "typed-array-buffer": "^1.0.0", + "typed-array-byte-length": "^1.0.0", + "typed-array-byte-offset": "^1.0.0", + "typed-array-length": "^1.0.4", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", + "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.2", + "has-tostringtag": "^1.0.0", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.53.0", + "resolved": 
"https://registry.npmjs.org/eslint/-/eslint-8.53.0.tgz", + "integrity": "sha512-N4VuiPjXDUa4xVeV/GC/RV3hQW9Nw+Y463lkWaKKXKYMvmRiRDAtfpuPFLN+E1/6ZhyR8J2ig+eVREnYgUsiag==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.3", + "@eslint/js": "8.53.0", + "@humanwhocodes/config-array": "^0.11.13", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", + "dev": true, + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": 
"3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-import-resolver-typescript": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.1.tgz", + "integrity": "sha512-xgdptdoi5W3niYeuQxKmzVDTATvLYqhpwmykwsh7f6HIOStGWEIL9iqZgQDF9u9OEzrRwR8no5q2VT+bjAujTg==", + "dev": true, + "dependencies": { + "debug": "^4.3.4", + "enhanced-resolve": "^5.12.0", + "eslint-module-utils": "^2.7.4", + "fast-glob": "^3.3.1", + "get-tsconfig": "^4.5.0", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", + "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "dev": true, + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.0.tgz", + "integrity": 
"sha512-QPOO5NO6Odv5lpoTkddtutccQjysJuFxoPS7fAHO+9m9udNHvTCPSAMW9zGAYj8lAIdr40I8yPCdUYrncXtrwg==", + "dev": true, + "dependencies": { + "array-includes": "^3.1.7", + "array.prototype.findlastindex": "^1.2.3", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.8.0", + "hasown": "^2.0.0", + "is-core-module": "^2.13.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.7", + "object.groupby": "^1.0.1", + "object.values": "^1.1.7", + "semver": "^6.3.1", + "tsconfig-paths": "^3.14.2" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.0.1.tgz", + "integrity": "sha512-m3u5RnR56asrwV/lDC4GHorlW75DsFfmUcjfCYylTUs85dBRnB7VM6xG8eCMJdeDRnppzmxZVf1GEPJvl1JmNg==", + "dev": true, + "dependencies": { + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.8.5" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/prettier" + }, + 
"peerDependencies": { + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "prettier": ">=3.0.0" + }, + "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + 
"version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-diff": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", + 
"integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": 
"https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.2.9", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz", + "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", + "dev": true + }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": 
"sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/form-data-encoder": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", + "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", + "dev": true, + "engines": { + "node": ">= 14.17" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/gas-bound-caller": { + "resolved": "../../DoesItMatter", + "link": true + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-stdin": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-9.0.0.tgz", + "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-tsconfig": { + "version": "4.7.2", + "resolved": 
"https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.2.tgz", + "integrity": "sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==", + "dev": true, + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "13.23.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz", + "integrity": "sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/got": { + "version": "12.6.1", + "resolved": "https://registry.npmjs.org/got/-/got-12.6.1.tgz", + "integrity": "sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==", + "dev": true, + "dependencies": { + "@sindresorhus/is": "^5.2.0", + "@szmarczak/http-timer": "^5.0.1", + "cacheable-lookup": "^7.0.0", + "cacheable-request": "^10.2.8", + "decompress-response": "^6.0.0", + "form-data-encoder": "^2.1.2", + "get-stream": "^6.0.1", + "http2-wrapper": "^2.1.10", + "lowercase-keys": "^3.0.0", + "p-cancelable": "^3.0.0", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/got?sponsor=1" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/graphemer": { + 
"version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "node_modules/http2-wrapper": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.1.tgz", + "integrity": "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==", + "dev": true, + "dependencies": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.2.0" + }, + "engines": { + "node": ">=10.19.0" + } + }, + "node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "dev": true, + "engines": { + "node": ">=14.18.0" + } + }, + "node_modules/ignore": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz", + "integrity": 
"sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/ini": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ini/-/ini-3.0.1.tgz", + "integrity": "sha512-it4HyVAUTKBc6m8e1iXWvXSTdndF7HbdN713+kvLrymxTaU4AUBWrJ4vEooP+V7fexnVD3LKcBshjGGPefSMUQ==", + "dev": true, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/internal-slot": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", + "integrity": 
"sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.2", + "hasown": "^2.0.0", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", + "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.0", + "is-typed-array": "^1.1.10" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dev": true, + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": 
"sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "dev": true, + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "dev": true, + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "dev": true, + "dependencies": { + 
"has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "dependencies": { + 
"has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", + "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "dev": true, + "dependencies": { + "which-typed-array": "^1.1.11" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-wsl/node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true, + 
"bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/js-yaml/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + 
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "dev": true + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/l1-contracts": { + "resolved": "../../DoesItMatter", + "link": true + }, + "node_modules/l2-contracts": { + "resolved": "../../DoesItMatter", + "link": true + }, + "node_modules/latest-version": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz", + 
"integrity": "sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==", + "dev": true, + "dependencies": { + "package-json": "^8.1.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/linkify-it": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-4.0.1.tgz", + "integrity": "sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw==", + "dev": true, + "dependencies": { + "uc.micro": "^1.0.1" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + 
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==", + "dev": true + }, + "node_modules/lowercase-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", + "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/markdown-it": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-13.0.1.tgz", + "integrity": "sha512-lTlxriVoy2criHP0JKRhO2VDG9c2ypWCsT237eDiLqi09rmbKoUetyGHq2uOIRoRS//kfoJckS0eUzzkDR+k2Q==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1", + "entities": "~3.0.1", + "linkify-it": "^4.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/markdown-it/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/markdownlint": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.27.0.tgz", + "integrity": "sha512-HtfVr/hzJJmE0C198F99JLaeada+646B5SaG2pVoEakLFI6iRGsvMqrnnrflq8hm1zQgwskEgqSnhDW11JBp0w==", + "dev": true, + "dependencies": { + "markdown-it": "13.0.1" + }, + "engines": { + "node": 
">=14.18.0" + } + }, + "node_modules/markdownlint-cli": { + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/markdownlint-cli/-/markdownlint-cli-0.33.0.tgz", + "integrity": "sha512-zMK1oHpjYkhjO+94+ngARiBBrRDEUMzooDHBAHtmEIJ9oYddd9l3chCReY2mPlecwH7gflQp1ApilTo+o0zopQ==", + "dev": true, + "dependencies": { + "commander": "~9.4.1", + "get-stdin": "~9.0.0", + "glob": "~8.0.3", + "ignore": "~5.2.4", + "js-yaml": "^4.1.0", + "jsonc-parser": "~3.2.0", + "markdownlint": "~0.27.0", + "minimatch": "~5.1.2", + "run-con": "~1.2.11" + }, + "bin": { + "markdownlint": "markdownlint.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/markdownlint-cli/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/markdownlint-cli/node_modules/commander": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.4.1.tgz", + "integrity": "sha512-5EEkTNyHNGFPD2H+c/dXXfQZYa/scCKasxWcXJaWnNJ99pnQN9Vnmqow+p+PlFPE63Q6mThaZws1T+HxfpgtPw==", + "dev": true, + "engines": { + "node": "^12.20.0 || >=14" + } + }, + "node_modules/markdownlint-cli/node_modules/glob": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz", + "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/markdownlint-cli/node_modules/ignore": { + "version": "5.2.4", + "resolved": 
"https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", + "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/markdownlint-cli/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", + "dev": true + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": 
"sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mimic-response": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", + "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/normalize-url": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.0.1.tgz", + "integrity": 
"sha512-IO9QvjUMWxPQQhs60oOu10CRkWCiZzSUkzbXGGV9pviYl1fXYcvkzQ5jV9z8Y6un8ARoVRl4EtC6v6jNqbaJ/w==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz", + "integrity": "sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/object-inspect": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", + "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "dev": true, + 
"dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", + "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", + "integrity": "sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1" + } + }, + "node_modules/object.values": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", + "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz", + "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", + "dev": true, + "dependencies": { + "default-browser": "^4.0.0", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "dev": true, + "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-cancelable": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", + "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==", + "dev": true, + "engines": { + "node": ">=12.20" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-json": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-8.1.1.tgz", + "integrity": "sha512-cbH9IAIJHNj9uXi196JVsRlt7cHKak6u/e6AkL/bkRelZ7rlL3X1YKxsZwa36xipOEKAsdtmaG6aAJoM1fx2zA==", + "dev": true, + "dependencies": { + "got": "^12.1.0", + "registry-auth-token": "^5.0.1", + "registry-url": "^6.0.0", + "semver": "^7.3.7" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-json/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + 
"@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + 
"node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pluralize": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", + "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.1.0.tgz", + "integrity": "sha512-TQLvXjq5IAibjh8EpBIkNKxO749UEWABoiIZehEPiY4GNpVdhaFKqSTu+QrlU6D2dPAfubRmtJTi4K4YkQ5eXw==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/prettier-plugin-solidity": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/prettier-plugin-solidity/-/prettier-plugin-solidity-1.2.0.tgz", + "integrity": 
"sha512-fgxcUZpVAP+LlRfy5JI5oaAkXGkmsje2VJ5krv/YMm+rcTZbIUwFguSw5f+WFuttMjpDm6wB4UL7WVkArEfiVA==", + "dev": true, + "dependencies": { + "@solidity-parser/parser": "^0.16.2", + "semver": "^7.5.4", + "solidity-comments-extractor": "^0.0.7" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "prettier": ">=2.3.0" + } + }, + "node_modules/prettier-plugin-solidity/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/prettier-plugin-solidity/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/prettier-plugin-solidity/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/proto-list": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", + "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", + "dev": true + }, + "node_modules/punycode": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.0.tgz", + "integrity": "sha512-Yxz2kRwT90aPiWEMHVYnEf4+rhwF1tBmmZ4KepCP+Wkium9JxtWnUm1nqGwpiAHr/tnTSeHqr3wb++jgSkXjhA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + 
"node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dev": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/rc/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + }, + "node_modules/rc/node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.1", + "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", + "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "set-function-name": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/registry-auth-token": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.2.tgz", + "integrity": "sha512-o/3ikDxtXaA59BmZuZrJZDJv8NMDGSj+6j6XaeBmHw8eY1i1qd9+6H+LjVvQXx3HN6aRCGa1cUdJ9RaJZUugnQ==", + "dev": true, + "dependencies": { + "@pnpm/npm-conf": "^2.1.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/registry-url": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-6.0.1.tgz", + "integrity": "sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==", + "dev": true, + "dependencies": { + "rc": "1.2.8" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "dev": 
true + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/responselike": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", + "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", + "dev": true, + "dependencies": { + "lowercase-keys": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-applescript": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", + 
"integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", + "dev": true, + "dependencies": { + "execa": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/run-applescript/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/run-applescript/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + 
"node": ">=6" + } + }, + "node_modules/run-applescript/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/run-applescript/node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/run-con": { + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/run-con/-/run-con-1.2.12.tgz", + "integrity": "sha512-5257ILMYIF4RztL9uoZ7V9Q97zHtNHn5bN3NobeAnzB1P3ASLgg8qocM2u+R18ttp+VEM78N2LK8XcNVtnSRrg==", + "dev": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~3.0.0", + "minimist": "^1.2.8", + "strip-json-comments": "~3.1.1" + }, + "bin": { + "run-con": "cli.js" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": 
"patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-array-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz", + "integrity": "sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex-test": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", + "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "is-regex": "^1.1.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + 
"node_modules/set-function-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/solhint": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/solhint/-/solhint-4.5.4.tgz", + "integrity": "sha512-Cu1XiJXub2q1eCr9kkJ9VPv1sGcmj3V7Zb76B0CoezDOB9bu3DxKIFFH7ggCl9fWpEPD6xBmRLfZrYijkVmujQ==", + "dev": true, + "dependencies": { + "@solidity-parser/parser": "^0.18.0", + "ajv": "^6.12.6", + "antlr4": "^4.13.1-patch-1", + "ast-parents": "^0.0.1", + "chalk": "^4.1.2", + "commander": "^10.0.0", + "cosmiconfig": "^8.0.0", + "fast-diff": "^1.2.0", + "glob": "^8.0.3", + "ignore": "^5.2.4", + "js-yaml": "^4.1.0", + "latest-version": "^7.0.0", + "lodash": "^4.17.21", + "pluralize": "^8.0.0", + "semver": "^7.5.2", + "strip-ansi": "^6.0.1", + "table": "^6.8.1", + "text-table": "^0.2.0" + }, + "bin": { + "solhint": "solhint.js" + }, + "optionalDependencies": { + "prettier": "^2.8.3" + } + }, + "node_modules/solhint/node_modules/@solidity-parser/parser": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.18.0.tgz", + "integrity": "sha512-yfORGUIPgLck41qyN7nbwJRAx17/jAIXCTanHOJZhB6PJ1iAk/84b/xlsVKFSyNyLXIj0dhppoE0+CRws7wlzA==", + "dev": true + }, + "node_modules/solhint/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/solhint/node_modules/commander": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", + "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/solhint/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/solhint/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/solhint/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/solhint/node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": 
"sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "dev": true, + "optional": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/solhint/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/solhint/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/solidity-comments-extractor": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/solidity-comments-extractor/-/solidity-comments-extractor-0.0.7.tgz", + "integrity": "sha512-wciNMLg/Irp8OKGrh3S2tfvZiZ0NEyILfcRCXCD4mp7SgK/i9gzLfhY2hY7VMCQJ3kH9UB9BzNdibIVMchzyYw==", + "dev": true + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz", + "integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==", 
+ "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz", + "integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz", + "integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/synckit": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.5.tgz", + "integrity": "sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q==", + "dev": true, + "dependencies": { + "@pkgr/utils": "^2.3.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + 
"node_modules/synckit/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "dev": true + }, + "node_modules/system-contracts": { + "resolved": "../../DoesItMatter", + "link": true + }, + "node_modules/table": { + "version": "6.8.1", + "resolved": "https://registry.npmjs.org/table/-/table-6.8.1.tgz", + "integrity": "sha512-Y4X9zqrCftUhMeH2EptSSERdVKt/nEdijTOacGD/97EKjhQ/Qs8RTlEGABSJNNN8lac9kheH+af7yAkEWlgneA==", + "dev": true, + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/table/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/table/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/table/node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tapable": { 
+ "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/titleize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", + "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-api-utils": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.3.tgz", + "integrity": "sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg==", + "dev": true, + "engines": { + "node": ">=16.13.0" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/tsconfig-paths": { + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", + "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", + "dev": true, + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + 
"strip-bom": "^3.0.0" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", + "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1", + "is-typed-array": "^1.1.10" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", + "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "has-proto": "^1.0.1", + "is-typed-array": "^1.1.10" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", + "integrity": 
"sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "has-proto": "^1.0.1", + "is-typed-array": "^1.1.10" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", + "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "is-typed-array": "^1.1.9" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "dev": true, + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "dev": true + }, + "node_modules/unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", + "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.4", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": 
{ + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "system-contracts": { + "version": "0.1.0", + "extraneous": true, + "license": "MIT", + "dependencies": { + "@matterlabs/hardhat-zksync-deploy": "^0.6.5", + "@nomiclabs/hardhat-solpp": "^2.0.1", + "commander": "^9.4.1", + "ethers": "^5.7.0", + "fast-glob": "^3.3.2", + "hardhat": "^2.18.3", + "preprocess": "^3.2.0", + "zksync-web3": "^0.14.3" + }, + "devDependencies": { + "@matterlabs/hardhat-zksync-chai-matchers": "^0.1.4", + "@matterlabs/hardhat-zksync-node": "^0.0.1-beta.7", + "@matterlabs/hardhat-zksync-solc": "^0.4.2", + "@nomicfoundation/hardhat-chai-matchers": "^1.0.3", + "@nomiclabs/hardhat-ethers": "^2.0.0", + "@typechain/ethers-v5": "^2.0.0", + "@types/chai": "^4.2.21", + "@types/lodash": "^4.14.199", + "@types/mocha": "^8.2.3", + "@types/node": "^17.0.34", + "chai": "^4.3.10", + "hardhat-typechain": "^0.3.3", + "lodash": "^4.17.21", + "mocha": "^9.0.2", + "template-file": "^6.0.1", + "ts-generator": "^0.1.1", + "ts-node": "^10.1.0", + "typechain": "^4.0.0", + "typescript": "^4.6.4", + "zksync-ethers": "^5.0.0" + } + } + } +} diff --git a/system-contracts/.gitignore b/system-contracts/.gitignore new file mode 100644 index 000000000..16d545bb0 --- /dev/null +++ b/system-contracts/.gitignore @@ -0,0 +1,15 @@ +# Compiler files +cache/ +out/ +zkout/ + +# Ignores development broadcast 
logs +!/broadcast +/broadcast/*/31337/ +/broadcast/**/dry-run/ + +# Docs +docs/ + +# Dotenv file +.env diff --git a/system-contracts/README.md b/system-contracts/README.md index 1449c936f..4058a356b 100644 --- a/system-contracts/README.md +++ b/system-contracts/README.md @@ -1,10 +1,10 @@ -# zkSync Era: System Contracts +# ZKsync Era: System Contracts [![Logo](../eraLogo.svg)](https://zksync.io/) -zkSync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or +ZKsync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or decentralization. Since it's EVM compatible (Solidity/Vyper), 99% of Ethereum projects can redeploy without refactoring -or re-auditing a single line of code. zkSync Era also uses an LLVM-based compiler that will eventually let developers +or re-auditing a single line of code. ZKsync Era also uses an LLVM-based compiler that will eventually let developers write smart contracts in C++, Rust and other popular languages. ## system-contracts @@ -17,7 +17,7 @@ the most commonly used contracts: each deployed contract is known. This contract also defines the derivation address. Whenever a contract is deployed, a ContractDeployed event is emitted. -`L1Messenger` This contract is used to send messages from zkSync to Ethereum. For each message sent, the L1MessageSent +`L1Messenger` This contract is used to send messages from ZKsync to Ethereum. For each message sent, the L1MessageSent event is emitted. `NonceHolder` This contract stores account nonces. The account nonces are stored in a single place for efficiency (the @@ -43,7 +43,7 @@ Update the system contracts hashes: `yarn sc calculate-hashes:fix` ### Run tests -The tests of the system contracts utilize the zkSync test node. In order to run the tests, execute the following commands in the root of the repository: +The tests of the system contracts utilize the ZKsync test node. 
In order to run the tests, execute the following commands in the root of the repository: ``` yarn test-node @@ -150,7 +150,7 @@ changes. ## License -The zkSync Era system-contracts are distributed under the terms of the MIT license. +The ZKsync Era system-contracts are distributed under the terms of the MIT license. See [LICENSE-MIT](LICENSE-MIT) for details. @@ -166,7 +166,7 @@ See [LICENSE-MIT](LICENSE-MIT) for details. ## Disclaimer -zkSync Era has been through lots of testing and audits. Although it is live, it is still in alpha state and will go +ZKsync Era has been through lots of testing and audits. Although it is live, it is still in alpha state and will go through more audits and bug bounties programs. We would love to hear our community's thoughts and suggestions about it! It is important to state that forking it now can potentially lead to missing important security updates, critical features, and performance improvements. diff --git a/system-contracts/SystemContractsHashes.json b/system-contracts/SystemContractsHashes.json index 7cb1c5b9c..163458f1f 100644 --- a/system-contracts/SystemContractsHashes.json +++ b/system-contracts/SystemContractsHashes.json @@ -3,210 +3,231 @@ "contractName": "AccountCodeStorage", "bytecodePath": "artifacts-zk/contracts-preprocessed/AccountCodeStorage.sol/AccountCodeStorage.json", "sourceCodePath": "contracts-preprocessed/AccountCodeStorage.sol", - "bytecodeHash": "0x0100007549287362e4263ea5b204f01fc3c7f2ac09d71e6eb21029698220f01a", - "sourceCodeHash": "0xfbf66e830201c4b7fda14f0ddf28a53beb7fbb48a8406392bcfd0ef7ea9265c8" + "bytecodeHash": "0x0100005d3ae95fb62791ed4693e614755bd780011ffc3d2dea8344fb1284f9df", + "sourceCodeHash": "0x2e0e09d57a04bd1e722d8bf8c6423fdf3f8bca44e5e8c4f6684f987794be066e" }, { "contractName": "BootloaderUtilities", "bytecodePath": "artifacts-zk/contracts-preprocessed/BootloaderUtilities.sol/BootloaderUtilities.json", "sourceCodePath": "contracts-preprocessed/BootloaderUtilities.sol", - "bytecodeHash": 
"0x010007d1e53f2dca05f7e27ae5b7062291ed3a1470ca511140b8e786aae7eb77", - "sourceCodeHash": "0x9ff5a2da00acfa145ee4575381ad386587d96b6a0309d05015974f4726881132" + "bytecodeHash": "0x010007c7daac9a547e1e20ed650b09b21668c3fb49e23cea5113dd8cb224b9ac", + "sourceCodeHash": "0x0f1213c4b95acb71f4ab5d4082cc1aeb2bd5017e1cccd46afc66e53268609d85" }, { "contractName": "ComplexUpgrader", "bytecodePath": "artifacts-zk/contracts-preprocessed/ComplexUpgrader.sol/ComplexUpgrader.json", "sourceCodePath": "contracts-preprocessed/ComplexUpgrader.sol", - "bytecodeHash": "0x01000055c1f27b8316ba61bf07959b11cf3b2a418aa357ccc5531c0914a2da27", - "sourceCodeHash": "0x0aa5d7ed159e783acde47856b13801b7f2268ba39b2fa50807fe3d705c506e96" + "bytecodeHash": "0x0100004de3ddc8c5296fed145323ba1d6874af80469e40657bf0ff79d113ccb5", + "sourceCodeHash": "0x796046a914fb676ba2bbd337b2924311ee2177ce54571c18a2c3945755c83614" }, { "contractName": "Compressor", "bytecodePath": "artifacts-zk/contracts-preprocessed/Compressor.sol/Compressor.json", "sourceCodePath": "contracts-preprocessed/Compressor.sol", - "bytecodeHash": "0x01000179842b5aa1c76036f5b90652fe614dacb28438a89649d6ca48131bd402", - "sourceCodeHash": "0xd43ac120a50398e0d6bdcfcf807154bfeece0c231509a0eb2e00bcad744e60cd" + "bytecodeHash": "0x0100014b3784efd0fbc6825fa84f3dcf9fc1dcbed37a681c57098c347527ba21", + "sourceCodeHash": "0x7240b5fb2ea8e184522e731fb14f764ebae52b8a69d1870a55daedac9a3ed617" }, { "contractName": "ContractDeployer", "bytecodePath": "artifacts-zk/contracts-preprocessed/ContractDeployer.sol/ContractDeployer.json", "sourceCodePath": "contracts-preprocessed/ContractDeployer.sol", - "bytecodeHash": "0x010005215fda00bfbf95847a13078bd16cdcb1b875534261c1dda9940c7754fe", - "sourceCodeHash": "0x635301b824f927b4d17b3d9974cf6abbf979dda49e610805637db7c677d5f522" + "bytecodeHash": "0x010004e522c95733920e0a1f072b5dc36dd3d6a1b30515de48423575c10a8f7e", + "sourceCodeHash": "0x92bc09da23ed9d86ba7a84f0dbf48503c99582ae58cdbebbdcc5f14ea1fcf014" }, { 
"contractName": "Create2Factory", "bytecodePath": "artifacts-zk/contracts-preprocessed/Create2Factory.sol/Create2Factory.json", "sourceCodePath": "contracts-preprocessed/Create2Factory.sol", - "bytecodeHash": "0x0100004bc85f45ebf0f0bf004752bcbff1bb99792d6cc6494227970ec77fe53b", - "sourceCodeHash": "0x217e65f55c8add77982171da65e0db8cc10141ba75159af582973b332a4e098a" + "bytecodeHash": "0x01000049a4e4beb07895adcdcf34186af3d28a9f3c1d9b56c72c8464730755f1", + "sourceCodeHash": "0x114d9322a9ca654989f3e0b3b21f1311dbc4db84f443d054cd414f6414d84de3" }, { "contractName": "DefaultAccount", "bytecodePath": "artifacts-zk/contracts-preprocessed/DefaultAccount.sol/DefaultAccount.json", "sourceCodePath": "contracts-preprocessed/DefaultAccount.sol", - "bytecodeHash": "0x01000563374c277a2c1e34659a2a1e87371bb6d852ce142022d497bfb50b9e32", - "sourceCodeHash": "0xa42423712ddaa8f357d26e46825fda80a9a870d0ac7ff52c98884355f1173ec7" + "bytecodeHash": "0x0100055d3993e14104994ca4d8cfa91beb9b544ee86894b45708b4824d832ff2", + "sourceCodeHash": "0xebffe840ebbd9329edb1ebff8ca50f6935e7dabcc67194a896fcc2e968d46dfb" }, { "contractName": "EmptyContract", "bytecodePath": "artifacts-zk/contracts-preprocessed/EmptyContract.sol/EmptyContract.json", "sourceCodePath": "contracts-preprocessed/EmptyContract.sol", - "bytecodeHash": "0x0100000781e55a60f3f14fd7dd67e3c8caab896b7b0fca4a662583959299eede", - "sourceCodeHash": "0xc88a4210dda96bc21fc852860fb74a4efeb0cc4101ffe6d928551cab46d15263" + "bytecodeHash": "0x010000078f32964c38fbd138a0369f4723f07ac6f4919c45ef738c18bf874ccd", + "sourceCodeHash": "0xcac36c5afafbcff83601f4fbfdff660aa66d8c80ed97b9322d3011c1926b554d" }, { "contractName": "ImmutableSimulator", "bytecodePath": "artifacts-zk/contracts-preprocessed/ImmutableSimulator.sol/ImmutableSimulator.json", "sourceCodePath": "contracts-preprocessed/ImmutableSimulator.sol", - "bytecodeHash": "0x0100003de00c5ceaa3fdf4566a9822ce94abe676f68b17a6ae11c453e14455fd", - "sourceCodeHash": 
"0x30df621c72cb35b8820b902b91057f72d0214a0e4a6b7ad4c0847e674e8b9df8" + "bytecodeHash": "0x010000394846da43b9adfe72f0820c19d39daaf861e2eae55d6fe248840f641e", + "sourceCodeHash": "0x9659e69f7db09e8f60a8bb95314b1ed26afcc689851665cf27f5408122f60c98" }, { "contractName": "KnownCodesStorage", "bytecodePath": "artifacts-zk/contracts-preprocessed/KnownCodesStorage.sol/KnownCodesStorage.json", "sourceCodePath": "contracts-preprocessed/KnownCodesStorage.sol", - "bytecodeHash": "0x0100007d82d4a2eb62e539e3c89cc641f507132b247022ba05ef1ddfed2b0073", - "sourceCodeHash": "0x51d388adc58f67ef975a94a7978caa60ed8a0df9d3bd9ac723dfcfc540286c70" + "bytecodeHash": "0x0100006f1fa761c40d5b3325482c8bc9a577ac65278b624523b67eb99cf7e51c", + "sourceCodeHash": "0xb39b5b81168653e0c5062f7b8e1d6d15a4e186df3317f192f0cb2fc3a74f5448" }, { "contractName": "L1Messenger", "bytecodePath": "artifacts-zk/contracts-preprocessed/L1Messenger.sol/L1Messenger.json", "sourceCodePath": "contracts-preprocessed/L1Messenger.sol", - "bytecodeHash": "0x010002b97ebf3c481ead775617590ffca139bee428e443aa49eb38b6a5b83657", - "sourceCodeHash": "0x35c189f3babf5c7a9ce2590bed9eb62b59766e358b7733fdb1bc33f4c232f765" + "bytecodeHash": "0x010001f74edfe69d83816cc586cbb42b2a37d2649dcd1cab88052a37dffaadf3", + "sourceCodeHash": "0x8d22a4019347a45cb0c27bed9e98f7033637a7bdcd90fafb1922caa48f2b05de" }, { "contractName": "L2BaseToken", "bytecodePath": "artifacts-zk/contracts-preprocessed/L2BaseToken.sol/L2BaseToken.json", "sourceCodePath": "contracts-preprocessed/L2BaseToken.sol", - "bytecodeHash": "0x010001039329e4bb55b24531c7e7d27ed40d2c82ad145033fdd5ed5b8ea86cf3", - "sourceCodeHash": "0x76ac95c12820d9a02cd1f177eab59092d99463816f2616e1e0f44637bf791a43" + "bytecodeHash": "0x0100010395d69e52583bf981c6eb16a7f45a4e930671c69df04e24c58dba3648", + "sourceCodeHash": "0x8bdd2b4d0b53dba84c9f0af250bbaa2aad10b3de6747bba957f0bd3721090dfa" + }, + { + "contractName": "L2GatewayUpgrade", + "bytecodePath": 
"artifacts-zk/contracts-preprocessed/L2GatewayUpgrade.sol/L2GatewayUpgrade.json", + "sourceCodePath": "contracts-preprocessed/L2GatewayUpgrade.sol", + "bytecodeHash": "0x0100019db8d039fb6ef02a7aea5eb8cbfd917afdf3dc88659f28e18b6a43e5df", + "sourceCodeHash": "0xc69d0c1819a366fc91a43a9ad2d747540a7b8ee2071b48e4adfd301ec40e8600" + }, + { + "contractName": "L2GenesisUpgrade", + "bytecodePath": "artifacts-zk/contracts-preprocessed/L2GenesisUpgrade.sol/L2GenesisUpgrade.json", + "sourceCodePath": "contracts-preprocessed/L2GenesisUpgrade.sol", + "bytecodeHash": "0x010000fbf45473bbd4c0ef708beb28c5f40c10b22b0a289feb4ccbce9b7ae825", + "sourceCodeHash": "0xe21a58d96b2727020958e95605764b84f7f2f3cc4e0c01c4cdf2854f882d41ca" + }, + { + "contractName": "L2GenesisUpgradeHelper", + "bytecodePath": "artifacts-zk/contracts-preprocessed/L2GenesisUpgradeHelper.sol/L2GenesisUpgradeHelper.json", + "sourceCodePath": "contracts-preprocessed/L2GenesisUpgradeHelper.sol", + "bytecodeHash": "0x01000007e61dc52f7aff33c2c209b51ab7fbaf9b4668980f076fdabb24934f97", + "sourceCodeHash": "0xc2fd787a23e7935f8109f3eb35b71853fe3a9acf5cd96138afcb0e5b65002ba3" }, { "contractName": "MsgValueSimulator", "bytecodePath": "artifacts-zk/contracts-preprocessed/MsgValueSimulator.sol/MsgValueSimulator.json", "sourceCodePath": "contracts-preprocessed/MsgValueSimulator.sol", - "bytecodeHash": "0x010000695a1e821b6d5fcb25e25793b81de0bdca3ff8277e3ac93a38e729e0a1", - "sourceCodeHash": "0x3f9e0af527875bebcdc20ca4ecb6822305877fd6038e4c4c58854d000b9ac115" + "bytecodeHash": "0x0100005d895eb5ad625c93a99c3796c18e8fda2e34e9af6997c5208aea197cc2", + "sourceCodeHash": "0x082f3dcbc2fe4d93706c86aae85faa683387097d1b676e7ebd00f71ee0f13b71" }, { "contractName": "NonceHolder", "bytecodePath": "artifacts-zk/contracts-preprocessed/NonceHolder.sol/NonceHolder.json", "sourceCodePath": "contracts-preprocessed/NonceHolder.sol", - "bytecodeHash": "0x010000e563d4ad7b4822cc19d8f74f2c41ee3d3153379be4b02b27d4498d52b6", - "sourceCodeHash": 
"0x91847512344ac5026e9fd396189c23ad9e253f22cb6e2fe65805c20c915797d4" + "bytecodeHash": "0x010000d9515266d6a161d41fe0789fe8e75a31a3d8c0ce915ed09fa4ffcd7c61", + "sourceCodeHash": "0xcd0c0366effebf2c98c58cf96322cc242a2d1c675620ef5514b7ed1f0a869edc" }, { "contractName": "PubdataChunkPublisher", "bytecodePath": "artifacts-zk/contracts-preprocessed/PubdataChunkPublisher.sol/PubdataChunkPublisher.json", "sourceCodePath": "contracts-preprocessed/PubdataChunkPublisher.sol", - "bytecodeHash": "0x01000049eb6d79244e74e5286ed4d3f6eef2b5eb746b67d98691dbc28fa16984", - "sourceCodeHash": "0xbc62d673c2cf9ba2d2148e5e2f99ea577cd357c6fd3ad7d248f670c750050faa" + "bytecodeHash": "0x010000491ab9335e1a112a136580f1f1b2c2929d11be12aed00fd94925bc3fc1", + "sourceCodeHash": "0x04d3d2e4019081c87aae5c22a060d84ae2e9d631ebce59801ecce37b9c87e4c7" }, { "contractName": "SystemContext", "bytecodePath": "artifacts-zk/contracts-preprocessed/SystemContext.sol/SystemContext.json", "sourceCodePath": "contracts-preprocessed/SystemContext.sol", - "bytecodeHash": "0x010001b3f2c3a6bdd5ad00ae29a7cbbb32dca3c31fb608b5cd52f8f3056a3847", - "sourceCodeHash": "0xb90284d78f48a958d082c4c877fc91ec292d05f0e388c6c78e6cce6d3b069a63" + "bytecodeHash": "0x010001a7baaabeaf80186c0dec134f606186fa9f73f91e753806b424ca8f171f", + "sourceCodeHash": "0xb3b8c1f57928938ac590984442bc96c2c888282793014845d5ce2f90bbf2677f" }, { "contractName": "EventWriter", "bytecodePath": "contracts-preprocessed/artifacts/EventWriter.yul.zbin", "sourceCodePath": "contracts-preprocessed/EventWriter.yul", - "bytecodeHash": "0x010000159a3a08da3ac57cdefec0e9e30da60456bc5643134cf16d6957bcf1ac", - "sourceCodeHash": "0x55cfee65f174350edfd690c949bc0a29458f25da11f1d5f90b57621567df1fc3" + "bytecodeHash": "0x010000159b30cba9e2096353695b63ca5cbf566416a545a6bcb2ff2e4e672f98", + "sourceCodeHash": "0xfcf4828bcc109dea5f88c38f428d9ac5e18d5a2767fa4909277802c7e38c1f93" }, { "contractName": "CodeOracle", "bytecodePath": 
"contracts-preprocessed/precompiles/artifacts/CodeOracle.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/CodeOracle.yul", - "bytecodeHash": "0x01000023b02bbb21baf1367835e56ae17b82688527dc8f78caf34b12e670ee65", - "sourceCodeHash": "0x55692fab0ef8b5bab3f6fb77aec84f3d1f1cdf97c0640b327d10594ea61218d2" + "bytecodeHash": "0x01000023d652655672eafbb0adc385bd423a4a59f752a28f3dde16e74fa205e3", + "sourceCodeHash": "0x476063e7907f2b7a532c4da6f606fa07186b5a10d77af8fdd83dbea3d9f23f93" }, { "contractName": "EcAdd", "bytecodePath": "contracts-preprocessed/precompiles/artifacts/EcAdd.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/EcAdd.yul", - "bytecodeHash": "0x010000872dd7e2dc1b34416c174086aa84fd80c78acc7b670214da955bd55728", - "sourceCodeHash": "0xc04879ed27207cd276997a856b6507d6d003801a2ee4c4bb4491f0032370895f" + "bytecodeHash": "0x01000087be6181fcb16bebb0567c58b658eec345822aec1d42d471e84f758b85", + "sourceCodeHash": "0xdfec1c5f8c6a93df1c8821f1ac15058a18a640bcbdeb67dc4a017f2153ff1c86" }, { "contractName": "EcMul", "bytecodePath": "contracts-preprocessed/precompiles/artifacts/EcMul.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/EcMul.yul", - "bytecodeHash": "0x010000bd8bd7ab008f76e359dc296ff5fe0e8a95fedce1d570943e90143acdfd", - "sourceCodeHash": "0xb142465167a02139087fda7640ff859489b33081dcc7c2a8089da5b480bcb58c" + "bytecodeHash": "0x010000bd553a916fcda3726f7b6b3ccfc17887166982915ced63abc78ba43b66", + "sourceCodeHash": "0x0e3f320c8a9532425b85809bf0a2136e707046a01bf20491ec03c77887516c43" }, { "contractName": "EcPairing", "bytecodePath": "contracts-preprocessed/precompiles/artifacts/EcPairing.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/EcPairing.yul", - "bytecodeHash": "0x01000f1b3432a32f9fba2115f5dd3b0ee8127e7bf2c609d57d3e231f19119c43", - "sourceCodeHash": "0x149f025b222369ab65b9995a6d61df8b557b23f8b52a05f21dc2164839befb18" + "bytecodeHash": 
"0x01000f1b5f8dd50a00b502d2663746a49a81a01857b6ee1e1b38c9959142b299", + "sourceCodeHash": "0x5d008cedc44e0e52c2567fd2b877916b2ec5e7c80294cf99b66485e50a6f2c12" }, { "contractName": "Ecrecover", "bytecodePath": "contracts-preprocessed/precompiles/artifacts/Ecrecover.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/Ecrecover.yul", - "bytecodeHash": "0x0100001112e34172b2bc31574d155893a087a1cf4b608cf9895a2201ea7bd6ee", - "sourceCodeHash": "0xe2334f04fa8003d448c7e6bfb345e644f2c851328aa5b49cb30acf45d6e0bbcf" + "bytecodeHash": "0x010000113d6b03e34605f26aa1fc6fb8953561eb55bb5ea192a5a38f7de3053b", + "sourceCodeHash": "0x21e03ab7a5f518a21258669c82506b1d4d1141f8fd4f30bb385f9730580ddd3c" }, { "contractName": "Keccak256", "bytecodePath": "contracts-preprocessed/precompiles/artifacts/Keccak256.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/Keccak256.yul", - "bytecodeHash": "0x0100000f248e111a1b587fef850dc4585c39af2dd505bc8a0d5cc6d3fcc7ed3c", - "sourceCodeHash": "0x3e6b02b36eb6d8cebe19ae258c2aed531f9be6c261ae02d301ba31b2cd388776" + "bytecodeHash": "0x0100000ff991d5847f1e9c10c5969d0f03b34a25411ad86d5cb3e0d9c3931e0b", + "sourceCodeHash": "0xb454e7760732ce1fffc75174c8cf54dca422206cf1e52a29d274b310b574f26d" }, { "contractName": "P256Verify", "bytecodePath": "contracts-preprocessed/precompiles/artifacts/P256Verify.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/P256Verify.yul", - "bytecodeHash": "0x0100001169cd6aa311c1bc9bbe2e7dd085720c96bb197e3223be7e9c66e46ef9", - "sourceCodeHash": "0x4fa14862937a646a2440a8ef5c4358b59e3e53dff5f11a65a1167cd31894b94c" + "bytecodeHash": "0x010000116595cfcc96291f95d47ede2ce630f25ccbd7428f00dc7f8135fb565a", + "sourceCodeHash": "0x976b68d0362307313fd1aaea309eaa2d849187f37da451618c70dd3a6ac3cf3c" }, { "contractName": "SHA256", "bytecodePath": "contracts-preprocessed/precompiles/artifacts/SHA256.yul.zbin", "sourceCodePath": "contracts-preprocessed/precompiles/SHA256.yul", - "bytecodeHash": 
"0x0100001752dc8a1a374a6346781205017b7b594d97c28812265865f3a45fcb45", - "sourceCodeHash": "0x6de4b57a9cca1cfda7a8edbf6f3e06aafa32c70458a3cc09972b548714ec51d3" + "bytecodeHash": "0x010000171e4e61b14feacd43cb555bffa5f194d38117132957708dcef83ac15a", + "sourceCodeHash": "0xfd4290467e26e992f39db9ca132e78ce99ce042b0254a368f1d7832dc94ddefb" }, { "contractName": "bootloader_test", "bytecodePath": "bootloader/build/artifacts/bootloader_test.yul.zbin", "sourceCodePath": "bootloader/build/bootloader_test.yul", - "bytecodeHash": "0x010003cbf67ee7370dd2e77fb9ad39f718ded9354be174ea3009c6cb4fb8c06d", - "sourceCodeHash": "0x232e09be0ce4a92a3b77558e5724ab67e9deaf68e12e8be682a999655203b066" + "bytecodeHash": "0x010003cb2fe407ac312db0aa6a6e746ca41cd19ab0eea216a14ec725f5cc7444", + "sourceCodeHash": "0xd7748f25eeb4f17b5d5bc09172f09ffdd9b8a34d011b3fb62aac5b494231b168" }, { "contractName": "fee_estimate", "bytecodePath": "bootloader/build/artifacts/fee_estimate.yul.zbin", "sourceCodePath": "bootloader/build/fee_estimate.yul", - "bytecodeHash": "0x01000951968c701c02714779299712d9da6e400e56c78d0d07acd984bfe7242a", - "sourceCodeHash": "0x9b2d51a24186af7ef58f7c8f53d77f6732f3a8d2dbde556fc8c1152957855fa5" + "bytecodeHash": "0x01000931a58a1d205bdf0b87674e56f96bb5f2192173c96a07886121b6867c47", + "sourceCodeHash": "0x67877a2bd129d189c32e63333325fff1e0ee19650a270b6bfa55906e1eaa79d6" }, { "contractName": "gas_test", "bytecodePath": "bootloader/build/artifacts/gas_test.yul.zbin", "sourceCodePath": "bootloader/build/gas_test.yul", - "bytecodeHash": "0x010008d7dffe019f801bf2ee23b93f83afd80ea6d20c8efe82da71fd57cbcb5c", - "sourceCodeHash": "0xf6624fe716eec6bcd5d513f069f33d758271b304009d2bdf5c5b7d0573868a1c" + "bytecodeHash": "0x010008b7e13ae7b54e537ea6f7b4e030f7b3c81e44b05f41dea2eb13c19e6235", + "sourceCodeHash": "0xa173ad90cabe1b3431ee803b2c9b3943ece686c98df1600dad4cec28f5a027c8" }, { "contractName": "playground_batch", "bytecodePath": "bootloader/build/artifacts/playground_batch.yul.zbin", 
"sourceCodePath": "bootloader/build/playground_batch.yul", - "bytecodeHash": "0x01000957420977a293aab097a368f36b123247d87d4695a6cd27ac62598ab171", - "sourceCodeHash": "0x23293faa6627f60f8b4d61657c615cb2327162dd1e33c0968e9ab4d5dd605a20" + "bytecodeHash": "0x010009358206ce648b88a76f3199a3ea0c0e1183a9ebfca11cdcba924453db98", + "sourceCodeHash": "0xda17354bca78e6b816ce8f7d1d7ff45e1c5ed0cd9f6ea0437b7cac614ff2019c" }, { "contractName": "proved_batch", "bytecodePath": "bootloader/build/artifacts/proved_batch.yul.zbin", "sourceCodePath": "bootloader/build/proved_batch.yul", - "bytecodeHash": "0x010008e742608b21bf7eb23c1a9d0602047e3618b464c9b59c0fba3b3d7ab66e", - "sourceCodeHash": "0x8ac4971296d0546fc6366caa4089489177656cbc33cc21247947d98c28c6dee4" + "bytecodeHash": "0x010008c753336bc8d1ddca235602b9f31d346412b2d463cd342899f7bfb73baf", + "sourceCodeHash": "0x7a5776e8001a2d93e14165af0b08cadcf1170354401d0c31fdc0d2a8f8439989" } ] diff --git a/system-contracts/bootloader/bootloader.yul b/system-contracts/bootloader/bootloader.yul index 85cc33dda..1c58a3bf7 100644 --- a/system-contracts/bootloader/bootloader.yul +++ b/system-contracts/bootloader/bootloader.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + object "Bootloader" { code { } @@ -11,14 +13,15 @@ object "Bootloader" { // While we definitely cannot control the pubdata price on L1, // we need to check the operator does not provide any absurd numbers there + // These number were chosen to allow for base tokens with low token/eth conversion. 
function MAX_ALLOWED_FAIR_PUBDATA_PRICE() -> ret { - // 1M gwei - ret := 1000000000000000 + // 2^64 - 1 wei + ret := 18446744073709551615 } function MAX_ALLOWED_FAIR_L2_GAS_PRICE() -> ret { - // 10k gwei - ret := 10000000000000 + // 2^64 - 1 wei + ret := 18446744073709551615 } /// @dev This method ensures that the prices provided by the operator @@ -619,10 +622,10 @@ object "Bootloader" { switch isETHCall case 1 { let gasLimitForTx, reservedGas := getGasLimitForTx( - innerTxDataOffset, - transactionIndex, + innerTxDataOffset, + transactionIndex, gasPerPubdata, - L2_TX_INTRINSIC_GAS(), + L2_TX_INTRINSIC_GAS(), L2_TX_INTRINSIC_PUBDATA() ) @@ -691,63 +694,6 @@ object "Bootloader" { ret := mload(0) } - /// @dev The function that is temporarily needed to upgrade the SystemContext system contract. This function is to be removed - /// once the upgrade is complete. - /// @dev Checks whether the code hash of the SystemContext contract is correct and updates it if needed. - /// @dev The bootloader calls `setPubdataInfo` before each transaction, including the upgrade one. - /// However, the old SystemContext does not have this method. So the bootloader should invoke this function - /// before starting any transaction. - function upgradeSystemContextIfNeeded() { - let expectedCodeHash := {{SYSTEM_CONTEXT_EXPECTED_CODE_HASH}} - - let actualCodeHash := getRawCodeHash(SYSTEM_CONTEXT_ADDR(), true) - if iszero(eq(expectedCodeHash, actualCodeHash)) { - // Now, we need to encode the call to the `ContractDeployer.forceDeployOnAddresses()` function. - - // The `mimicCallOnlyResult` requires that the first word of the data - // contains its length. Here it is 292 bytes. 
- mstore(0, 292) - mstore(32, {{PADDED_FORCE_DEPLOY_ON_ADDRESSES_SELECTOR}}) - - // The 0x20 offset, for the array of forced deployments - mstore(36, 0x0000000000000000000000000000000000000000000000000000000000000020) - // Only one force deployment - mstore(68, 0x0000000000000000000000000000000000000000000000000000000000000001) - - // Now, starts the description of the forced deployment itself. - // Firstly, the offset. - mstore(100, 0x0000000000000000000000000000000000000000000000000000000000000020) - // The new hash of the SystemContext contract. - mstore(132, expectedCodeHash) - // The address of the system context - mstore(164, SYSTEM_CONTEXT_ADDR()) - // The constructor must be called to reset the `blockGasLimit` variable - mstore(196, 0x0000000000000000000000000000000000000000000000000000000000000001) - // The value should be 0. - mstore(228, 0x0000000000000000000000000000000000000000000000000000000000000000) - // The offset of the input array. - mstore(260, 0x00000000000000000000000000000000000000000000000000000000000000a0) - // No input is provided, the array is empty. - mstore(292, 0x0000000000000000000000000000000000000000000000000000000000000000) - - // We'll use a mimicCall to simulate the correct sender. - let success := mimicCallOnlyResult( - CONTRACT_DEPLOYER_ADDR(), - FORCE_DEPLOYER(), - 0, - 0, - 0, - 0, - 0, - 0 - ) - - if iszero(success) { - assertionError("system context upgrade fail") - } - } - } - /// @dev Calculates the canonical hash of the L1->L2 transaction that will be /// sent to L1 as a message to the L1 contract that a certain operation has been processed. function getCanonicalL1TxHash(txDataOffset) -> ret { @@ -1335,7 +1281,7 @@ object "Bootloader" { /// @param gasLimitForTx The L2 gas limit for the transaction validation & execution. /// @param gasPrice The L2 gas price that should be used by the transaction. /// @param basePubdataSpent The amount of pubdata spent at the beginning of the transaction. 
- /// @param reservedGas The amount of gas reserved for the pubdata. + /// @param reservedGas The amount of gas reserved for the pubdata. /// @param gasPerPubdata The price of each byte of pubdata in L2 gas. /// @return gasLeft The gas left after the validation step. function l2TxValidation( @@ -1399,7 +1345,7 @@ object "Bootloader" { /// @param txDataOffset The offset to the ABI-encoded Transaction struct. /// @param gasLeft The gas left after the validation step. /// @param basePubdataSpent The amount of pubdata spent at the beginning of the transaction. - /// @param reservedGas The amount of gas reserved for the pubdata. + /// @param reservedGas The amount of gas reserved for the pubdata. /// @param gasPerPubdata The price of each byte of pubdata in L2 gas. /// @return success Whether or not the execution step was successful. /// @return gasSpentOnExecute The gas spent on the transaction execution. @@ -1496,7 +1442,7 @@ object "Bootloader" { /// @param abi The nearCall ABI. It is implicitly used as gasLimit for the call of this function. /// @param txDataOffset The offset to the ABI-encoded Transaction struct. /// @param basePubdataSpent The amount of pubdata spent at the beginning of the transaction. - /// @param reservedGas The amount of gas reserved for the pubdata. + /// @param reservedGas The amount of gas reserved for the pubdata. /// @param gasPerPubdata The price of each byte of pubdata in L2 gas. function ZKSYNC_NEAR_CALL_executeL2Tx( abi, @@ -1539,7 +1485,7 @@ object "Bootloader" { /// @param abi The nearCall ABI. It is implicitly used as gasLimit for the call of this function. /// @param txDataOffset The offset to the ABI-encoded Transaction struct. /// @param basePubdataSpent The amount of pubdata spent at the beginning of the transaction. - /// @param reservedGas The amount of gas reserved for the pubdata. + /// @param reservedGas The amount of gas reserved for the pubdata. /// @param gasPerPubdata The price of each byte of pubdata in L2 gas. 
function ZKSYNC_NEAR_CALL_markFactoryDepsL2( abi, @@ -1891,7 +1837,7 @@ object "Bootloader" { debugLog("from", from) debugLog("gasPrice", gasPrice) - // We assume that addresses of smart contracts on zkSync and Ethereum + // We assume that addresses of smart contracts on ZKsync and Ethereum // never overlap, so no need to check whether `from` is an EOA here. debugLog("setting tx origin", from) @@ -2324,7 +2270,7 @@ object "Bootloader" { /// @param maxRefundedGas The maximum number of gas the bootloader can be refunded. /// @param basePubdataSpent The amount of pubdata spent at the beginning of the transaction. /// @param gasPerPubdata The price of each byte of pubdata in L2 gas. - /// @param reservedGas The amount of gas reserved for the pubdata. + /// @param reservedGas The amount of gas reserved for the pubdata. /// This is the `maximum` number because it does not take into account the number of gas that /// can be spent by the paymaster itself. function ZKSYNC_NEAR_CALL_callPostOp( @@ -2608,7 +2554,7 @@ object "Bootloader" { } /// - /// zkSync-specific utilities: + /// ZKsync-specific utilities: /// /// @dev Returns an ABI that can be used for low-level @@ -2730,15 +2676,23 @@ object "Bootloader" { function l1MessengerPublishingCall() { let ptr := OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_BEGIN_BYTE() debugLog("Publishing batch data to L1", 0) + + setHook(VM_HOOK_PUBDATA_REQUESTED()) + // First slot (only last 4 bytes) -- selector mstore(ptr, {{PUBLISH_PUBDATA_SELECTOR}}) - // Second slot -- offset - mstore(add(ptr, 32), 32) - setHook(VM_HOOK_PUBDATA_REQUESTED()) + // Second slot is occupied by the address of the L2 DA validator. + // The operator can provide any one it wants. It will be the responsibility of the + // L1Messenger system contract to send the corresponding log to L1. + // + // Third slot -- offset. 
The correct value must be equal to 64 + assertEq(mload(add(ptr, 64)), 64, "offset for L1Messenger is not 64") + // Third slot -- length of pubdata - let len := mload(add(ptr, 64)) - // 4 bytes for selector, 32 bytes for array offset and 32 bytes for array length - let fullLen := add(len, 68) + let len := mload(add(ptr, 96)) + // 4 bytes for selector, 32 bytes for ABI-encoded L2 DA validator address, + // 32 bytes for array offset and 32 bytes for array length + let fullLen := add(len, 100) // ptr + 28 because the function selector only takes up the last 4 bytes in the first slot. let success := call( @@ -2845,7 +2799,7 @@ object "Bootloader" { let spentErgs := getErgsSpentForPubdata(basePubdataSpent, gasPerPubdata) debugLog("spentErgsPubdata", spentErgs) let allowedGasLimit := add(computeGas, reservedGas) - + ret := lt(allowedGasLimit, spentErgs) } @@ -3635,7 +3589,7 @@ object "Bootloader" { } /// @dev Asks operator for the refund for the transaction. The function provides - /// the operator with the proposed refund gas by the bootloader, + /// the operator with the proposed refund gas by the bootloader, /// total spent gas on the pubdata and gas per 1 byte of pubdata. /// This function is called before the refund stage, because at that point /// only the operator knows how close does a transaction @@ -3946,17 +3900,17 @@ object "Bootloader" { /// @dev Log key used by Executor.sol for processing. See Constants.sol::SystemLogKey enum function chainedPriorityTxnHashLogKey() -> ret { - ret := 5 + ret := 3 } /// @dev Log key used by Executor.sol for processing. See Constants.sol::SystemLogKey enum function numberOfLayer1TxsLogKey() -> ret { - ret := 6 + ret := 4 } /// @dev Log key used by Executor.sol for processing. 
See Constants.sol::SystemLogKey enum function protocolUpgradeTxHashKey() -> ret { - ret := 13 + ret := 7 } //////////////////////////////////////////////////////////////////////////// @@ -4018,8 +3972,6 @@ object "Bootloader" { assertionError("baseFee inconsistent") } - upgradeSystemContextIfNeeded() - setNewBatch(PREV_BATCH_HASH, NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) @@ -4028,8 +3980,6 @@ object "Bootloader" { let SHOULD_SET_NEW_BATCH := mload(224) - upgradeSystemContextIfNeeded() - switch SHOULD_SET_NEW_BATCH case 0 { unsafeOverrideBatch(NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) diff --git a/system-contracts/bootloader/tests/bootloader/bootloader_test.yul b/system-contracts/bootloader/tests/bootloader/bootloader_test.yul index ed506fcea..9e620fccf 100644 --- a/system-contracts/bootloader/tests/bootloader/bootloader_test.yul +++ b/system-contracts/bootloader/tests/bootloader/bootloader_test.yul @@ -105,7 +105,7 @@ function TEST_systemLogKeys() { let numberOfLayer1TxsLogKey := numberOfLayer1TxsLogKey() let protocolUpgradeTxHashKey := protocolUpgradeTxHashKey() - testing_assertEq(chainedPriorityTxnHashLogKey, 5, "Invalid priority txn hash log key") - testing_assertEq(numberOfLayer1TxsLogKey, 6, "Invalid num layer 1 txns log key") - testing_assertEq(protocolUpgradeTxHashKey, 13, "Invalid protocol upgrade txn hash log key") + testing_assertEq(chainedPriorityTxnHashLogKey, 3, "Invalid priority txn hash log key") + testing_assertEq(numberOfLayer1TxsLogKey, 4, "Invalid num layer 1 txns log key") + testing_assertEq(protocolUpgradeTxHashKey, 7, "Invalid protocol upgrade txn hash log key") } diff --git a/system-contracts/contracts/AccountCodeStorage.sol b/system-contracts/contracts/AccountCodeStorage.sol index 399ea54f5..4c55279c4 100644 --- a/system-contracts/contracts/AccountCodeStorage.sol +++ b/system-contracts/contracts/AccountCodeStorage.sol @@ -1,10 +1,11 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 
0.8.24; import {IAccountCodeStorage} from "./interfaces/IAccountCodeStorage.sol"; import {Utils} from "./libraries/Utils.sol"; import {DEPLOYER_SYSTEM_CONTRACT, NONCE_HOLDER_SYSTEM_CONTRACT, CURRENT_MAX_PRECOMPILE_ADDRESS} from "./Constants.sol"; +import {Unauthorized, InvalidCodeHash, CodeHashReason} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -13,17 +14,19 @@ import {DEPLOYER_SYSTEM_CONTRACT, NONCE_HOLDER_SYSTEM_CONTRACT, CURRENT_MAX_PREC * @dev Code hash is not strictly a hash, it's a structure where the first byte denotes the version of the hash, * the second byte denotes whether the contract is constructed, and the next two bytes denote the length in 32-byte words. * And then the next 28 bytes are the truncated hash. - * @dev In this version of zkSync, the first byte of the hash MUST be 1. + * @dev In this version of ZKsync, the first byte of the hash MUST be 1. * @dev The length of each bytecode MUST be odd. It's internal code format requirements, due to padding of SHA256 function. * @dev It is also assumed that all the bytecode hashes are *known*, i.e. the full bytecodes * were published on L1 as calldata. This contract trusts the ContractDeployer and the KnownCodesStorage * system contracts to enforce the invariants mentioned above. */ contract AccountCodeStorage is IAccountCodeStorage { - bytes32 constant EMPTY_STRING_KECCAK = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470; + bytes32 private constant EMPTY_STRING_KECCAK = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470; modifier onlyDeployer() { - require(msg.sender == address(DEPLOYER_SYSTEM_CONTRACT), "Callable only by the deployer system contract"); + if (msg.sender != address(DEPLOYER_SYSTEM_CONTRACT)) { + revert Unauthorized(msg.sender); + } _; } @@ -34,7 +37,9 @@ contract AccountCodeStorage is IAccountCodeStorage { /// but checks whether the bytecode hash corresponds to the constructing smart contract. 
function storeAccountConstructingCodeHash(address _address, bytes32 _hash) external override onlyDeployer { // Check that code hash corresponds to the deploying smart contract - require(Utils.isContractConstructing(_hash), "Code hash is not for a contract on constructor"); + if (!Utils.isContractConstructing(_hash)) { + revert InvalidCodeHash(CodeHashReason.NotContractOnConstructor); + } _storeCodeHash(_address, _hash); } @@ -45,7 +50,9 @@ contract AccountCodeStorage is IAccountCodeStorage { /// but checks whether the bytecode hash corresponds to the constructed smart contract. function storeAccountConstructedCodeHash(address _address, bytes32 _hash) external override onlyDeployer { // Check that code hash corresponds to the deploying smart contract - require(Utils.isContractConstructed(_hash), "Code hash is not for a constructed contract"); + if (!Utils.isContractConstructed(_hash)) { + revert InvalidCodeHash(CodeHashReason.NotConstructedContract); + } _storeCodeHash(_address, _hash); } @@ -54,7 +61,9 @@ contract AccountCodeStorage is IAccountCodeStorage { function markAccountCodeHashAsConstructed(address _address) external override onlyDeployer { bytes32 codeHash = getRawCodeHash(_address); - require(Utils.isContractConstructing(codeHash), "Code hash is not for a contract on constructor"); + if (!Utils.isContractConstructing(codeHash)) { + revert InvalidCodeHash(CodeHashReason.NotContractOnConstructor); + } // Get the bytecode hash with "isConstructor" flag equal to false bytes32 constructedBytecodeHash = Utils.constructedBytecodeHash(codeHash); diff --git a/system-contracts/contracts/BootloaderUtilities.sol b/system-contracts/contracts/BootloaderUtilities.sol index 5551764dd..4fd38da74 100644 --- a/system-contracts/contracts/BootloaderUtilities.sol +++ b/system-contracts/contracts/BootloaderUtilities.sol @@ -1,11 +1,12 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IBootloaderUtilities} from 
"./interfaces/IBootloaderUtilities.sol"; import {Transaction, TransactionHelper, EIP_712_TX_TYPE, LEGACY_TX_TYPE, EIP_2930_TX_TYPE, EIP_1559_TX_TYPE} from "./libraries/TransactionHelper.sol"; import {RLPEncoder} from "./libraries/RLPEncoder.sol"; import {EfficientCall} from "./libraries/EfficientCall.sol"; +import {UnsupportedTxType, InvalidSig, SigField} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -34,7 +35,7 @@ contract BootloaderUtilities is IBootloaderUtilities { } else if (_transaction.txType == EIP_2930_TX_TYPE) { txHash = encodeEIP2930TransactionHash(_transaction); } else { - revert("Unsupported tx type"); + revert UnsupportedTxType(_transaction.txType); } } @@ -89,7 +90,9 @@ contract BootloaderUtilities is IBootloaderUtilities { bytes memory vEncoded; { uint256 vInt = uint256(uint8(_transaction.signature[64])); - require(vInt == 27 || vInt == 28, "Invalid v value"); + if (vInt != 27 && vInt != 28) { + revert InvalidSig(SigField.V, vInt); + } // If the `chainId` is specified in the transaction, then the `v` value is encoded as // `35 + y + 2 * chainId == vInt + 8 + 2 * chainId`, where y - parity bit (see EIP-155). @@ -174,7 +177,7 @@ contract BootloaderUtilities is IBootloaderUtilities { // Otherwise the length is not encoded at all. } - // On zkSync, access lists are always zero length (at least for now). + // On ZKsync, access lists are always zero length (at least for now). bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0); bytes memory rEncoded; @@ -190,7 +193,9 @@ contract BootloaderUtilities is IBootloaderUtilities { bytes memory vEncoded; { uint256 vInt = uint256(uint8(_transaction.signature[64])); - require(vInt == 27 || vInt == 28, "Invalid v value"); + if (vInt != 27 && vInt != 28) { + revert InvalidSig(SigField.V, vInt); + } vEncoded = RLPEncoder.encodeUint256(vInt - 27); } @@ -271,7 +276,7 @@ contract BootloaderUtilities is IBootloaderUtilities { // Otherwise the length is not encoded at all. 
} - // On zkSync, access lists are always zero length (at least for now). + // On ZKsync, access lists are always zero length (at least for now). bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0); bytes memory rEncoded; @@ -287,7 +292,9 @@ contract BootloaderUtilities is IBootloaderUtilities { bytes memory vEncoded; { uint256 vInt = uint256(uint8(_transaction.signature[64])); - require(vInt == 27 || vInt == 28, "Invalid v value"); + if (vInt != 27 && vInt != 28) { + revert InvalidSig(SigField.V, vInt); + } vEncoded = RLPEncoder.encodeUint256(vInt - 27); } diff --git a/system-contracts/contracts/ComplexUpgrader.sol b/system-contracts/contracts/ComplexUpgrader.sol index 2f4d886cd..a69545148 100644 --- a/system-contracts/contracts/ComplexUpgrader.sol +++ b/system-contracts/contracts/ComplexUpgrader.sol @@ -1,9 +1,10 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IComplexUpgrader} from "./interfaces/IComplexUpgrader.sol"; import {FORCE_DEPLOYER} from "./Constants.sol"; +import {Unauthorized, AddressHasNoCode} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -19,9 +20,13 @@ contract ComplexUpgrader is IComplexUpgrader { /// @param _delegateTo the address of the contract to which the calls will be delegated /// @param _calldata the calldata to be delegate called in the `_delegateTo` contract function upgrade(address _delegateTo, bytes calldata _calldata) external payable { - require(msg.sender == FORCE_DEPLOYER, "Can only be called by FORCE_DEPLOYER"); + if (msg.sender != FORCE_DEPLOYER) { + revert Unauthorized(msg.sender); + } - require(_delegateTo.code.length > 0, "Delegatee is an EOA"); + if (_delegateTo.code.length == 0) { + revert AddressHasNoCode(_delegateTo); + } (bool success, bytes memory returnData) = _delegateTo.delegatecall(_calldata); assembly { if iszero(success) { diff --git a/system-contracts/contracts/Compressor.sol b/system-contracts/contracts/Compressor.sol index 
f52c18ed4..f74b0a03d 100644 --- a/system-contracts/contracts/Compressor.sol +++ b/system-contracts/contracts/Compressor.sol @@ -1,13 +1,14 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {ICompressor, OPERATION_BITMASK, LENGTH_BITS_OFFSET, MAX_ENUMERATION_INDEX_SIZE} from "./interfaces/ICompressor.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; import {Utils} from "./libraries/Utils.sol"; import {UnsafeBytesCalldata} from "./libraries/UnsafeBytesCalldata.sol"; import {EfficientCall} from "./libraries/EfficientCall.sol"; import {L1_MESSENGER_CONTRACT, STATE_DIFF_ENTRY_SIZE, KNOWN_CODE_STORAGE_CONTRACT} from "./Constants.sol"; +import {DerivedKeyNotEqualToCompressedValue, EncodedAndRealBytecodeChunkNotEqual, DictionaryDividedByEightNotGreaterThanEncodedDividedByTwo, EncodedLengthNotFourTimesSmallerThanOriginal, IndexOutOfBounds, IndexSizeError, UnsupportedOperation, CompressorInitialWritesProcessedNotEqual, CompressorEnumIndexNotEqual, StateDiffLengthMismatch, CompressionValueTransformError, CompressionValueAddError, CompressionValueSubError} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -19,7 +20,7 @@ import {L1_MESSENGER_CONTRACT, STATE_DIFF_ENTRY_SIZE, KNOWN_CODE_STORAGE_CONTRAC * Or the user may compress the bytecode and publish it instead (fewer data onchain!). At the end of every L1 Batch * we publish pubdata, part of which contains the state diffs that occurred within the batch. */ -contract Compressor is ICompressor, ISystemContract { +contract Compressor is ICompressor, SystemContractBase { using UnsafeBytesCalldata for bytes; /// @notice Verify the compressed bytecode and publish it on the L1. 
@@ -48,24 +49,28 @@ contract Compressor is ICompressor, ISystemContract { unchecked { (bytes calldata dictionary, bytes calldata encodedData) = _decodeRawBytecode(_rawCompressedData); - require( - encodedData.length * 4 == _bytecode.length, - "Encoded data length should be 4 times shorter than the original bytecode" - ); + if (encodedData.length * 4 != _bytecode.length) { + revert EncodedLengthNotFourTimesSmallerThanOriginal(); + } - require( - dictionary.length / 8 <= encodedData.length / 2, - "Dictionary should have at most the same number of entries as the encoded data" - ); + if (dictionary.length / 8 > encodedData.length / 2) { + revert DictionaryDividedByEightNotGreaterThanEncodedDividedByTwo(); + } + // We disable this check because calldata array length is cheap. + // solhint-disable-next-line gas-length-in-loops for (uint256 encodedDataPointer = 0; encodedDataPointer < encodedData.length; encodedDataPointer += 2) { uint256 indexOfEncodedChunk = uint256(encodedData.readUint16(encodedDataPointer)) * 8; - require(indexOfEncodedChunk < dictionary.length, "Encoded chunk index is out of bounds"); + if (indexOfEncodedChunk > dictionary.length - 1) { + revert IndexOutOfBounds(); + } uint64 encodedChunk = dictionary.readUint64(indexOfEncodedChunk); uint64 realChunk = _bytecode.readUint64(encodedDataPointer * 4); - require(encodedChunk == realChunk, "Encoded chunk does not match the original bytecode"); + if (encodedChunk != realChunk) { + revert EncodedAndRealBytecodeChunkNotEqual(realChunk, encodedChunk); + } } } @@ -112,11 +117,13 @@ contract Compressor is ICompressor, ISystemContract { uint256 _enumerationIndexSize, bytes calldata _stateDiffs, bytes calldata _compressedStateDiffs - ) external onlyCallFrom(address(L1_MESSENGER_CONTRACT)) returns (bytes32 stateDiffHash) { + ) external view returns (bytes32 stateDiffHash) { // We do not enforce the operator to use the optimal, i.e. the minimally possible _enumerationIndexSize. 
// We do enforce however, that the _enumerationIndexSize is not larger than 8 bytes long, which is the // maximal ever possible size for enumeration index. - require(_enumerationIndexSize <= MAX_ENUMERATION_INDEX_SIZE, "enumeration index size is too large"); + if (_enumerationIndexSize > MAX_ENUMERATION_INDEX_SIZE) { + revert IndexSizeError(); + } uint256 numberOfInitialWrites = uint256(_compressedStateDiffs.readUint16(0)); @@ -132,16 +139,19 @@ contract Compressor is ICompressor, ISystemContract { continue; } - numInitialWritesProcessed++; + ++numInitialWritesProcessed; bytes32 derivedKey = stateDiff.readBytes32(52); uint256 initValue = stateDiff.readUint256(92); uint256 finalValue = stateDiff.readUint256(124); - require(derivedKey == _compressedStateDiffs.readBytes32(stateDiffPtr), "iw: initial key mismatch"); + bytes32 compressedDerivedKey = _compressedStateDiffs.readBytes32(stateDiffPtr); + if (derivedKey != compressedDerivedKey) { + revert DerivedKeyNotEqualToCompressedValue(derivedKey, compressedDerivedKey); + } stateDiffPtr += 32; uint8 metadata = uint8(bytes1(_compressedStateDiffs[stateDiffPtr])); - stateDiffPtr++; + ++stateDiffPtr; uint8 operation = metadata & OPERATION_BITMASK; uint8 len = operation == 0 ? 
32 : metadata >> LENGTH_BITS_OFFSET; _verifyValueCompression( @@ -153,7 +163,9 @@ contract Compressor is ICompressor, ISystemContract { stateDiffPtr += len; } - require(numInitialWritesProcessed == numberOfInitialWrites, "Incorrect number of initial storage diffs"); + if (numInitialWritesProcessed != numberOfInitialWrites) { + revert CompressorInitialWritesProcessedNotEqual(numberOfInitialWrites, numInitialWritesProcessed); + } // Process repeated writes for (uint256 i = 0; i < _numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; i += STATE_DIFF_ENTRY_SIZE) { @@ -168,11 +180,13 @@ contract Compressor is ICompressor, ISystemContract { uint256 compressedEnumIndex = _sliceToUint256( _compressedStateDiffs[stateDiffPtr:stateDiffPtr + _enumerationIndexSize] ); - require(enumIndex == compressedEnumIndex, "rw: enum key mismatch"); + if (enumIndex != compressedEnumIndex) { + revert CompressorEnumIndexNotEqual(enumIndex, compressedEnumIndex); + } stateDiffPtr += _enumerationIndexSize; uint8 metadata = uint8(bytes1(_compressedStateDiffs[stateDiffPtr])); - stateDiffPtr += 1; + ++stateDiffPtr; uint8 operation = metadata & OPERATION_BITMASK; uint8 len = operation == 0 ? 
32 : metadata >> LENGTH_BITS_OFFSET; _verifyValueCompression( @@ -184,7 +198,9 @@ contract Compressor is ICompressor, ISystemContract { stateDiffPtr += len; } - require(stateDiffPtr == _compressedStateDiffs.length, "Extra data in _compressedStateDiffs"); + if (stateDiffPtr != _compressedStateDiffs.length) { + revert StateDiffLengthMismatch(); + } stateDiffHash = EfficientCall.keccak(_stateDiffs); } @@ -227,19 +243,19 @@ contract Compressor is ICompressor, ISystemContract { unchecked { if (_operation == 0 || _operation == 3) { - require(convertedValue == _finalValue, "transform or no compression: compressed and final mismatch"); + if (convertedValue != _finalValue) { + revert CompressionValueTransformError(_finalValue, convertedValue); + } } else if (_operation == 1) { - require( - _initialValue + convertedValue == _finalValue, - "add: initial plus converted not equal to final" - ); + if (_initialValue + convertedValue != _finalValue) { + revert CompressionValueAddError(_finalValue, _initialValue + convertedValue); + } } else if (_operation == 2) { - require( - _initialValue - convertedValue == _finalValue, - "sub: initial minus converted not equal to final" - ); + if (_initialValue - convertedValue != _finalValue) { + revert CompressionValueSubError(_finalValue, _initialValue - convertedValue); + } } else { - revert("unsupported operation"); + revert UnsupportedOperation(); } } } diff --git a/system-contracts/contracts/Constants.sol b/system-contracts/contracts/Constants.sol index 0f8e2307f..072c6eab9 100644 --- a/system-contracts/contracts/Constants.sol +++ b/system-contracts/contracts/Constants.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; import {IAccountCodeStorage} from "./interfaces/IAccountCodeStorage.sol"; import {INonceHolder} from "./interfaces/INonceHolder.sol"; @@ -8,14 +8,16 @@ import {IContractDeployer} from "./interfaces/IContractDeployer.sol"; import {IKnownCodesStorage} from "./interfaces/IKnownCodesStorage.sol"; import {IImmutableSimulator} from "./interfaces/IImmutableSimulator.sol"; import {IBaseToken} from "./interfaces/IBaseToken.sol"; +import {IBridgehub} from "./interfaces/IBridgehub.sol"; import {IL1Messenger} from "./interfaces/IL1Messenger.sol"; import {ISystemContext} from "./interfaces/ISystemContext.sol"; import {ICompressor} from "./interfaces/ICompressor.sol"; import {IComplexUpgrader} from "./interfaces/IComplexUpgrader.sol"; import {IBootloaderUtilities} from "./interfaces/IBootloaderUtilities.sol"; import {IPubdataChunkPublisher} from "./interfaces/IPubdataChunkPublisher.sol"; +import {IMessageRoot} from "./interfaces/IMessageRoot.sol"; -/// @dev All the system contracts introduced by zkSync have their addresses +/// @dev All the system contracts introduced by ZKsync have their addresses /// started from 2^15 in order to avoid collision with Ethereum precompiles. uint160 constant SYSTEM_CONTRACTS_OFFSET = {{SYSTEM_CONTRACTS_OFFSET}}; // 2^15 @@ -24,10 +26,14 @@ uint160 constant SYSTEM_CONTRACTS_OFFSET = {{SYSTEM_CONTRACTS_OFFSET}}; // 2^15 /// mainnet. uint160 constant REAL_SYSTEM_CONTRACTS_OFFSET = 0x8000; + /// @dev All the system contracts must be located in the kernel space, /// i.e. their addresses must be below 2^16. uint160 constant MAX_SYSTEM_CONTRACT_ADDRESS = 0xffff; // 2^16 - 1 +/// @dev The offset from which the built-in, but user space contracts are located. 
+uint160 constant USER_CONTRACTS_OFFSET = MAX_SYSTEM_CONTRACT_ADDRESS + 1; + address constant ECRECOVER_SYSTEM_CONTRACT = address(0x01); address constant SHA256_SYSTEM_CONTRACT = address(0x02); address constant ECADD_SYSTEM_CONTRACT = address(0x06); @@ -35,7 +41,7 @@ address constant ECMUL_SYSTEM_CONTRACT = address(0x07); address constant ECPAIRING_SYSTEM_CONTRACT = address(0x08); -/// @dev The number of ergs that need to be spent for a single byte of pubdata regardless of the pubdata price. +/// @dev The number of gas that need to be spent for a single byte of pubdata regardless of the pubdata price. /// This variable is used to ensure the following: /// - That the long-term storage of the operator is compensated properly. /// - That it is not possible that the pubdata counter grows too high without spending proportional amount of computation. @@ -67,6 +73,11 @@ address constant MSG_VALUE_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0 IBaseToken constant BASE_TOKEN_SYSTEM_CONTRACT = IBaseToken(address(SYSTEM_CONTRACTS_OFFSET + 0x0a)); IBaseToken constant REAL_BASE_TOKEN_SYSTEM_CONTRACT = IBaseToken(address(REAL_SYSTEM_CONTRACTS_OFFSET + 0x0a)); +address constant L2_ASSET_ROUTER = address(USER_CONTRACTS_OFFSET + 0x03); +IBridgehub constant L2_BRIDGE_HUB = IBridgehub(address(USER_CONTRACTS_OFFSET + 0x02)); +address constant L2_NATIVE_TOKEN_VAULT_ADDR = address(USER_CONTRACTS_OFFSET + 0x04); +IMessageRoot constant L2_MESSAGE_ROOT = IMessageRoot(address(USER_CONTRACTS_OFFSET + 0x05)); + // Hardcoded because even for tests we should keep the address. (Instead `SYSTEM_CONTRACTS_OFFSET + 0x10`) // Precompile call depends on it. // And we don't want to mock this contract. 
@@ -107,18 +118,12 @@ uint256 constant STATE_DIFF_ENTRY_SIZE = 272; enum SystemLogKey { L2_TO_L1_LOGS_TREE_ROOT_KEY, - TOTAL_L2_TO_L1_PUBDATA_KEY, - STATE_DIFF_HASH_KEY, PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, PREV_BATCH_HASH_KEY, CHAINED_PRIORITY_TXN_HASH_KEY, NUMBER_OF_LAYER_1_TXS_KEY, - BLOB_ONE_HASH_KEY, - BLOB_TWO_HASH_KEY, - BLOB_THREE_HASH_KEY, - BLOB_FOUR_HASH_KEY, - BLOB_FIVE_HASH_KEY, - BLOB_SIX_HASH_KEY, + L2_DA_VALIDATOR_OUTPUT_HASH_KEY, + USED_L2_DA_VALIDATOR_ADDRESS_KEY, EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY } diff --git a/system-contracts/contracts/ContractDeployer.sol b/system-contracts/contracts/ContractDeployer.sol index 01009da19..ce87a58a6 100644 --- a/system-contracts/contracts/ContractDeployer.sol +++ b/system-contracts/contracts/ContractDeployer.sol @@ -1,32 +1,35 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {ImmutableData} from "./interfaces/IImmutableSimulator.sol"; -import {IContractDeployer} from "./interfaces/IContractDeployer.sol"; +import {IContractDeployer, ForceDeployment} from "./interfaces/IContractDeployer.sol"; import {CREATE2_PREFIX, CREATE_PREFIX, NONCE_HOLDER_SYSTEM_CONTRACT, ACCOUNT_CODE_STORAGE_SYSTEM_CONTRACT, FORCE_DEPLOYER, MAX_SYSTEM_CONTRACT_ADDRESS, KNOWN_CODE_STORAGE_CONTRACT, BASE_TOKEN_SYSTEM_CONTRACT, IMMUTABLE_SIMULATOR_SYSTEM_CONTRACT, COMPLEX_UPGRADER_CONTRACT} from "./Constants.sol"; import {Utils} from "./libraries/Utils.sol"; import {EfficientCall} from "./libraries/EfficientCall.sol"; import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; +import {Unauthorized, InvalidNonceOrderingChange, ValueMismatch, EmptyBytes32, NotAllowedToDeployInKernelSpace, HashIsNonZero, NonEmptyAccount, UnknownCodeHash, NonEmptyMsgValue} from "./SystemContractErrors.sol"; /** * @author Matter Labs * 
@custom:security-contact security@matterlabs.dev - * @notice System smart contract that is responsible for deploying other smart contracts on zkSync. + * @notice System smart contract that is responsible for deploying other smart contracts on ZKsync. * @dev The contract is responsible for generating the address of the deployed smart contract, * incrementing the deployment nonce and making sure that the constructor is never called twice in a contract. * Note, contracts with bytecode that have already been published to L1 once * do not need to be published anymore. */ -contract ContractDeployer is IContractDeployer, ISystemContract { +contract ContractDeployer is IContractDeployer, SystemContractBase { /// @notice Information about an account contract. /// @dev For EOA and simple contracts (i.e. not accounts) this value is 0. mapping(address => AccountInfo) internal accountInfo; modifier onlySelf() { - require(msg.sender == address(this), "Callable only by self"); + if (msg.sender != address(this)) { + revert Unauthorized(msg.sender); + } _; } @@ -74,11 +77,12 @@ contract ContractDeployer is IContractDeployer, ISystemContract { function updateNonceOrdering(AccountNonceOrdering _nonceOrdering) external onlySystemCall { AccountInfo memory currentInfo = accountInfo[msg.sender]; - require( - _nonceOrdering == AccountNonceOrdering.Arbitrary && - currentInfo.nonceOrdering == AccountNonceOrdering.Sequential, - "It is only possible to change from sequential to arbitrary ordering" - ); + if ( + _nonceOrdering != AccountNonceOrdering.Arbitrary || + currentInfo.nonceOrdering != AccountNonceOrdering.Sequential + ) { + revert InvalidNonceOrderingChange(); + } currentInfo.nonceOrdering = _nonceOrdering; _storeAccountInfo(msg.sender, currentInfo); @@ -194,20 +198,6 @@ contract ContractDeployer is IContractDeployer, ISystemContract { return newAddress; } - /// @notice A struct that describes a forced deployment on an address - struct ForceDeployment { - // The bytecode hash to put 
on an address - bytes32 bytecodeHash; - // The address on which to deploy the bytecodehash to - address newAddress; - // Whether to run the constructor on the force deployment - bool callConstructor; - // The value with which to initialize a contract - uint256 value; - // The constructor calldata - bytes input; - } - /// @notice The method that can be used to forcefully deploy a contract. /// @param _deployment Information about the forced deployment. /// @param _sender The `msg.sender` inside the constructor call. @@ -236,11 +226,10 @@ contract ContractDeployer is IContractDeployer, ISystemContract { /// @notice This method is to be used only during an upgrade to set bytecodes on specific addresses. /// @dev We do not require `onlySystemCall` here, since the method is accessible only /// by `FORCE_DEPLOYER`. - function forceDeployOnAddresses(ForceDeployment[] calldata _deployments) external payable { - require( - msg.sender == FORCE_DEPLOYER || msg.sender == address(COMPLEX_UPGRADER_CONTRACT), - "Can only be called by FORCE_DEPLOYER or COMPLEX_UPGRADER_CONTRACT" - ); + function forceDeployOnAddresses(ForceDeployment[] calldata _deployments) external payable override { + if (msg.sender != FORCE_DEPLOYER && msg.sender != address(COMPLEX_UPGRADER_CONTRACT)) { + revert Unauthorized(msg.sender); + } uint256 deploymentsLength = _deployments.length; // We need to ensure that the `value` provided by the call is enough to provide `value` @@ -249,7 +238,9 @@ contract ContractDeployer is IContractDeployer, ISystemContract { for (uint256 i = 0; i < deploymentsLength; ++i) { sumOfValues += _deployments[i].value; } - require(msg.value == sumOfValues, "`value` provided is not equal to the combined `value`s of deployments"); + if (msg.value != sumOfValues) { + revert ValueMismatch(sumOfValues, msg.value); + } for (uint256 i = 0; i < deploymentsLength; ++i) { this.forceDeployOnAddress{value: _deployments[i].value}(_deployments[i], msg.sender); @@ -262,16 +253,22 @@ contract 
ContractDeployer is IContractDeployer, ISystemContract { AccountAbstractionVersion _aaVersion, bytes calldata _input ) internal { - require(_bytecodeHash != bytes32(0x0), "BytecodeHash cannot be zero"); - require(uint160(_newAddress) > MAX_SYSTEM_CONTRACT_ADDRESS, "Can not deploy contracts in kernel space"); + if (_bytecodeHash == bytes32(0x0)) { + revert EmptyBytes32(); + } + if (uint160(_newAddress) <= MAX_SYSTEM_CONTRACT_ADDRESS) { + revert NotAllowedToDeployInKernelSpace(); + } // We do not allow deploying twice on the same address. - require( - ACCOUNT_CODE_STORAGE_SYSTEM_CONTRACT.getCodeHash(uint256(uint160(_newAddress))) == 0x0, - "Code hash is non-zero" - ); + bytes32 codeHash = ACCOUNT_CODE_STORAGE_SYSTEM_CONTRACT.getCodeHash(uint256(uint160(_newAddress))); + if (codeHash != 0x0) { + revert HashIsNonZero(codeHash); + } // Do not allow deploying contracts to default accounts that have already executed transactions. - require(NONCE_HOLDER_SYSTEM_CONTRACT.getRawNonce(_newAddress) == 0x00, "Account is occupied"); + if (NONCE_HOLDER_SYSTEM_CONTRACT.getRawNonce(_newAddress) != 0x00) { + revert NonEmptyAccount(); + } _performDeployOnAddress(_bytecodeHash, _newAddress, _aaVersion, _input); } @@ -308,7 +305,9 @@ contract ContractDeployer is IContractDeployer, ISystemContract { /// @notice Check that bytecode hash is marked as known on the `KnownCodeStorage` system contracts function _ensureBytecodeIsKnown(bytes32 _bytecodeHash) internal view { uint256 knownCodeMarker = KNOWN_CODE_STORAGE_CONTRACT.getMarker(_bytecodeHash); - require(knownCodeMarker > 0, "The code hash is not known"); + if (knownCodeMarker == 0) { + revert UnknownCodeHash(_bytecodeHash); + } } /// @notice Ensures that the _newAddress and assigns a new contract hash to it @@ -362,7 +361,9 @@ contract ContractDeployer is IContractDeployer, ISystemContract { ImmutableData[] memory immutables = abi.decode(returnData, (ImmutableData[])); IMMUTABLE_SIMULATOR_SYSTEM_CONTRACT.setImmutables(_newAddress, 
immutables); } else { - require(value == 0, "The value must be zero if we do not call the constructor"); + if (value != 0) { + revert NonEmptyMsgValue(); + } // If we do not call the constructor, we need to set the constructed code hash. ACCOUNT_CODE_STORAGE_SYSTEM_CONTRACT.storeAccountConstructedCodeHash(_newAddress, _bytecodeHash); } diff --git a/system-contracts/contracts/Create2Factory.sol b/system-contracts/contracts/Create2Factory.sol index 6f68fbb52..868de66fa 100644 --- a/system-contracts/contracts/Create2Factory.sol +++ b/system-contracts/contracts/Create2Factory.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {REAL_DEPLOYER_SYSTEM_CONTRACT} from "./Constants.sol"; import {EfficientCall} from "./libraries/EfficientCall.sol"; diff --git a/system-contracts/contracts/DefaultAccount.sol b/system-contracts/contracts/DefaultAccount.sol index 4c7356dd8..40a38e49b 100644 --- a/system-contracts/contracts/DefaultAccount.sol +++ b/system-contracts/contracts/DefaultAccount.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IAccount, ACCOUNT_VALIDATION_SUCCESS_MAGIC} from "./interfaces/IAccount.sol"; import {TransactionHelper, Transaction} from "./libraries/TransactionHelper.sol"; @@ -9,6 +9,7 @@ import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; import {EfficientCall} from "./libraries/EfficientCall.sol"; import {BOOTLOADER_FORMAL_ADDRESS, NONCE_HOLDER_SYSTEM_CONTRACT, DEPLOYER_SYSTEM_CONTRACT, INonceHolder} from "./Constants.sol"; import {Utils} from "./libraries/Utils.sol"; +import {InsufficientFunds, InvalidSig, SigField, FailedToPayOperator} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -99,7 +100,9 @@ contract DefaultAccount is IAccount { // should be checked explicitly to prevent user paying for fee for a // transaction that wouldn't be included on Ethereum. 
uint256 totalRequiredBalance = _transaction.totalRequiredBalance(); - require(totalRequiredBalance <= address(this).balance, "Not enough balance for fee + value"); + if (totalRequiredBalance > address(this).balance) { + revert InsufficientFunds(totalRequiredBalance, address(this).balance); + } if (_isValidSignature(txHash, _transaction.signature)) { magic = ACCOUNT_VALIDATION_SUCCESS_MAGIC; @@ -165,7 +168,9 @@ contract DefaultAccount is IAccount { /// @param _signature The signature of the transaction. /// @return EIP1271_SUCCESS_RETURN_VALUE if the signature is correct. It reverts otherwise. function _isValidSignature(bytes32 _hash, bytes memory _signature) internal view returns (bool) { - require(_signature.length == 65, "Signature length is incorrect"); + if (_signature.length != 65) { + revert InvalidSig(SigField.Length, _signature.length); + } uint8 v; bytes32 r; bytes32 s; @@ -178,7 +183,9 @@ contract DefaultAccount is IAccount { s := mload(add(_signature, 0x40)) v := and(mload(add(_signature, 0x41)), 0xff) } - require(v == 27 || v == 28, "v is neither 27 nor 28"); + if (v != 27 && v != 28) { + revert InvalidSig(SigField.V, v); + } // EIP-2 still allows signature malleability for ecrecover(). Remove this possibility and make the signature // unique. Appendix F in the Ethereum Yellow paper (https://ethereum.github.io/yellowpaper/paper.pdf), defines @@ -189,7 +196,9 @@ contract DefaultAccount is IAccount { // with 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 - s1 and flip v from 27 to 28 or // vice versa. If your library also generates signatures with 0/1 for v instead 27/28, add 27 to v to accept // these malleable signatures as well. 
- require(uint256(s) <= 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0, "Invalid s"); + if (uint256(s) > 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0) { + revert InvalidSig(SigField.S, uint256(s)); + } address recoveredAddress = ecrecover(_hash, v, r, s); @@ -207,7 +216,9 @@ contract DefaultAccount is IAccount { Transaction calldata _transaction ) external payable ignoreNonBootloader ignoreInDelegateCall { bool success = _transaction.payToTheBootloader(); - require(success, "Failed to pay the fee to the operator"); + if (!success) { + revert FailedToPayOperator(); + } } /// @notice Method, where the user should prepare for the transaction to be diff --git a/system-contracts/contracts/EmptyContract.sol b/system-contracts/contracts/EmptyContract.sol index 3f021964a..15516a7b3 100644 --- a/system-contracts/contracts/EmptyContract.sol +++ b/system-contracts/contracts/EmptyContract.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; /** * @author Matter Labs diff --git a/system-contracts/contracts/EventWriter.yul b/system-contracts/contracts/EventWriter.yul index 4cd4a3814..c85151b90 100644 --- a/system-contracts/contracts/EventWriter.yul +++ b/system-contracts/contracts/EventWriter.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev diff --git a/system-contracts/contracts/ImmutableSimulator.sol b/system-contracts/contracts/ImmutableSimulator.sol index 2d077316a..93110d782 100644 --- a/system-contracts/contracts/ImmutableSimulator.sol +++ b/system-contracts/contracts/ImmutableSimulator.sol @@ -1,9 +1,10 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IImmutableSimulator, ImmutableData} from "./interfaces/IImmutableSimulator.sol"; import {DEPLOYER_SYSTEM_CONTRACT} from "./Constants.sol"; +import {Unauthorized} from "./SystemContractErrors.sol"; /** * @author 
Matter Labs @@ -32,7 +33,9 @@ contract ImmutableSimulator is IImmutableSimulator { /// @param _dest The address which to store the immutables for. /// @param _immutables The list of the immutables. function setImmutables(address _dest, ImmutableData[] calldata _immutables) external override { - require(msg.sender == address(DEPLOYER_SYSTEM_CONTRACT), "Callable only by the deployer system contract"); + if (msg.sender != address(DEPLOYER_SYSTEM_CONTRACT)) { + revert Unauthorized(msg.sender); + } unchecked { uint256 immutablesLength = _immutables.length; for (uint256 i = 0; i < immutablesLength; ++i) { diff --git a/system-contracts/contracts/KnownCodesStorage.sol b/system-contracts/contracts/KnownCodesStorage.sol index 3db07fe31..31fa04734 100644 --- a/system-contracts/contracts/KnownCodesStorage.sol +++ b/system-contracts/contracts/KnownCodesStorage.sol @@ -1,11 +1,12 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IKnownCodesStorage} from "./interfaces/IKnownCodesStorage.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; import {Utils} from "./libraries/Utils.sol"; import {COMPRESSOR_CONTRACT, L1_MESSENGER_CONTRACT} from "./Constants.sol"; +import {Unauthorized, MalformedBytecode, BytecodeError} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -15,9 +16,11 @@ import {COMPRESSOR_CONTRACT, L1_MESSENGER_CONTRACT} from "./Constants.sol"; * the second byte denotes whether the contract is constructed, and the next two bytes denote the length in 32-byte words. * And then the next 28 bytes is the truncated hash. 
*/ -contract KnownCodesStorage is IKnownCodesStorage, ISystemContract { +contract KnownCodesStorage is IKnownCodesStorage, SystemContractBase { modifier onlyCompressor() { - require(msg.sender == address(COMPRESSOR_CONTRACT), "Callable only by the compressor"); + if (msg.sender != address(COMPRESSOR_CONTRACT)) { + revert Unauthorized(msg.sender); + } _; } @@ -73,8 +76,12 @@ contract KnownCodesStorage is IKnownCodesStorage, ISystemContract { /// That's why we need to validate it function _validateBytecode(bytes32 _bytecodeHash) internal pure { uint8 version = uint8(_bytecodeHash[0]); - require(version == 1 && _bytecodeHash[1] == bytes1(0), "Incorrectly formatted bytecodeHash"); + if (version != 1 || _bytecodeHash[1] != bytes1(0)) { + revert MalformedBytecode(BytecodeError.Version); + } - require(Utils.bytecodeLenInWords(_bytecodeHash) % 2 == 1, "Code length in words must be odd"); + if (Utils.bytecodeLenInWords(_bytecodeHash) % 2 == 0) { + revert MalformedBytecode(BytecodeError.NumberOfWords); + } } } diff --git a/system-contracts/contracts/L1Messenger.sol b/system-contracts/contracts/L1Messenger.sol index 2b584d110..0f9242ef1 100644 --- a/system-contracts/contracts/L1Messenger.sol +++ b/system-contracts/contracts/L1Messenger.sol @@ -1,13 +1,16 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; -import {IL1Messenger, L2ToL1Log, L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, L2_TO_L1_LOG_SERIALIZE_SIZE, STATE_DIFF_COMPRESSION_VERSION_NUMBER} from "./interfaces/IL1Messenger.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {IL1Messenger, L2ToL1Log, L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, L2_TO_L1_LOG_SERIALIZE_SIZE} from "./interfaces/IL1Messenger.sol"; + +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; import {EfficientCall} from "./libraries/EfficientCall.sol"; import {Utils} from "./libraries/Utils.sol"; -import 
{SystemLogKey, SYSTEM_CONTEXT_CONTRACT, KNOWN_CODE_STORAGE_CONTRACT, COMPRESSOR_CONTRACT, STATE_DIFF_ENTRY_SIZE, L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, PUBDATA_CHUNK_PUBLISHER, COMPUTATIONAL_PRICE_FOR_PUBDATA} from "./Constants.sol"; +import {SystemLogKey, SYSTEM_CONTEXT_CONTRACT, KNOWN_CODE_STORAGE_CONTRACT, L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, COMPUTATIONAL_PRICE_FOR_PUBDATA, L2_MESSAGE_ROOT} from "./Constants.sol"; +import {ReconstructionMismatch, PubdataField} from "./SystemContractErrors.sol"; +import {IL2DAValidator} from "./interfaces/IL2DAValidator.sol"; /** * @author Matter Labs @@ -22,7 +25,7 @@ import {SystemLogKey, SYSTEM_CONTEXT_CONTRACT, KNOWN_CODE_STORAGE_CONTRACT, COMP * - The contract on L1 accepts all sent messages and if the message came from this system contract * it requires that the preimage of `value` be provided. */ -contract L1Messenger is IL1Messenger, ISystemContract { +contract L1Messenger is IL1Messenger, SystemContractBase { /// @notice Sequential hash of logs sent in the current block. /// @dev Will be reset at the end of the block to zero value. bytes32 internal chainedLogsHash; @@ -87,7 +90,7 @@ contract L1Messenger is IL1Messenger, ISystemContract { // - at most 1 time keccakGasCost(64) when building the Merkle tree (as merkle tree can contain // ~2*N nodes, where the first N nodes are leaves the hash of which is calculated on the previous step). uint256 gasToPay = keccakGasCost(L2_TO_L1_LOG_SERIALIZE_SIZE) + 2 * keccakGasCost(64); - SystemContractHelper.burnGas(Utils.safeCastToU32(gasToPay), 0); + SystemContractHelper.burnGas(Utils.safeCastToU32(gasToPay), uint32(L2_TO_L1_LOG_SERIALIZE_SIZE)); } /// @notice Internal function to send L2ToL1Log. 
@@ -107,7 +110,7 @@ contract L1Messenger is IL1Messenger, ISystemContract { chainedLogsHash = keccak256(abi.encode(chainedLogsHash, hashedLog)); logIdInMerkleTree = numberOfLogsToProcess; - numberOfLogsToProcess++; + ++numberOfLogsToProcess; emit L2ToL1LogSent(_l2ToL1Log); } @@ -181,9 +184,9 @@ contract L1Messenger is IL1Messenger, ISystemContract { emit BytecodeL1PublicationRequested(_bytecodeHash); } - /// @notice Verifies that the {_totalL2ToL1PubdataAndStateDiffs} reflects what occurred within the L1Batch and that + /// @notice Verifies that the {_operatorInput} reflects what occurred within the L1Batch and that /// the compressed statediffs are equivalent to the full state diffs. - /// @param _totalL2ToL1PubdataAndStateDiffs The total pubdata and uncompressed state diffs of transactions that were + /// @param _operatorInput The total pubdata and uncompressed state diffs of transactions that were /// processed in the current L1 Batch. Pubdata consists of L2 to L1 Logs, messages, deployed bytecode, and state diffs. /// @dev Function that should be called exactly once per L1 Batch by the bootloader. /// @dev Checks that totalL2ToL1Pubdata is strictly packed data that should to be published to L1. @@ -192,29 +195,102 @@ contract L1Messenger is IL1Messenger, ISystemContract { /// @dev Performs calculation of L2ToL1Logs merkle tree root, "sends" such root and keccak256(totalL2ToL1Pubdata) /// to L1 using low-level (VM) L2Log. 
function publishPubdataAndClearState( - bytes calldata _totalL2ToL1PubdataAndStateDiffs + address _l2DAValidator, + bytes calldata _operatorInput ) external onlyCallFromBootloader { uint256 calldataPtr = 0; + // Check function sig and data in the other hashes + // 4 + 32 + 32 + 32 + 32 + 32 + 32 + // 4 bytes for L2 DA Validator `validatePubdata` function selector + // 32 bytes for rolling hash of user L2 -> L1 logs + // 32 bytes for root hash of user L2 -> L1 logs + // 32 bytes for hash of messages + // 32 bytes for hash of uncompressed bytecodes sent to L1 + // Operator data: 32 bytes for offset + // 32 bytes for length + + bytes4 inputL2DAValidatePubdataFunctionSig = bytes4(_operatorInput[calldataPtr:calldataPtr + 4]); + if (inputL2DAValidatePubdataFunctionSig != IL2DAValidator.validatePubdata.selector) { + revert ReconstructionMismatch( + PubdataField.InputDAFunctionSig, + bytes32(IL2DAValidator.validatePubdata.selector), + bytes32(inputL2DAValidatePubdataFunctionSig) + ); + } + calldataPtr += 4; + + bytes32 inputChainedLogsHash = bytes32(_operatorInput[calldataPtr:calldataPtr + 32]); + if (inputChainedLogsHash != chainedLogsHash) { + revert ReconstructionMismatch(PubdataField.InputLogsHash, chainedLogsHash, inputChainedLogsHash); + } + calldataPtr += 32; + + // Check happens below after we reconstruct the logs root hash + bytes32 inputChainedLogsRootHash = bytes32(_operatorInput[calldataPtr:calldataPtr + 32]); + calldataPtr += 32; + + bytes32 inputChainedMsgsHash = bytes32(_operatorInput[calldataPtr:calldataPtr + 32]); + if (inputChainedMsgsHash != chainedMessagesHash) { + revert ReconstructionMismatch(PubdataField.InputMsgsHash, chainedMessagesHash, inputChainedMsgsHash); + } + calldataPtr += 32; + + bytes32 inputChainedBytecodesHash = bytes32(_operatorInput[calldataPtr:calldataPtr + 32]); + if (inputChainedBytecodesHash != chainedL1BytecodesRevealDataHash) { + revert ReconstructionMismatch( + PubdataField.InputBytecodeHash, + chainedL1BytecodesRevealDataHash, 
+ inputChainedBytecodesHash + ); + } + calldataPtr += 32; + + uint256 offset = uint256(bytes32(_operatorInput[calldataPtr:calldataPtr + 32])); + // The length of the pubdata input should be stored right next to the calldata. + // We need to change offset by 32 - 4 = 28 bytes, since 32 bytes is the length of the offset + // itself and the 4 bytes are the selector which is not included inside the offset. + if (offset != calldataPtr + 28) { + revert ReconstructionMismatch(PubdataField.Offset, bytes32(calldataPtr + 28), bytes32(offset)); + } + uint256 length = uint256(bytes32(_operatorInput[calldataPtr + 32:calldataPtr + 64])); + + // Shift calldata ptr past the pubdata offset and len + calldataPtr += 64; + /// Check logs - uint32 numberOfL2ToL1Logs = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); - require(numberOfL2ToL1Logs <= L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, "Too many L2->L1 logs"); + uint32 numberOfL2ToL1Logs = uint32(bytes4(_operatorInput[calldataPtr:calldataPtr + 4])); + if (numberOfL2ToL1Logs > L2_TO_L1_LOGS_MERKLE_TREE_LEAVES) { + revert ReconstructionMismatch( + PubdataField.NumberOfLogs, + bytes32(L2_TO_L1_LOGS_MERKLE_TREE_LEAVES), + bytes32(uint256(numberOfL2ToL1Logs)) + ); + } calldataPtr += 4; + // We need to ensure that length is enough to read all logs + if (length < 4 + numberOfL2ToL1Logs * L2_TO_L1_LOG_SERIALIZE_SIZE) { + revert ReconstructionMismatch( + PubdataField.Length, + bytes32(4 + numberOfL2ToL1Logs * L2_TO_L1_LOG_SERIALIZE_SIZE), + bytes32(length) + ); + } + bytes32[] memory l2ToL1LogsTreeArray = new bytes32[](L2_TO_L1_LOGS_MERKLE_TREE_LEAVES); - bytes32 reconstructedChainedLogsHash; + bytes32 reconstructedChainedLogsHash = bytes32(0); for (uint256 i = 0; i < numberOfL2ToL1Logs; ++i) { bytes32 hashedLog = EfficientCall.keccak( - _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + L2_TO_L1_LOG_SERIALIZE_SIZE] + _operatorInput[calldataPtr:calldataPtr + L2_TO_L1_LOG_SERIALIZE_SIZE] ); calldataPtr += 
L2_TO_L1_LOG_SERIALIZE_SIZE; l2ToL1LogsTreeArray[i] = hashedLog; reconstructedChainedLogsHash = keccak256(abi.encode(reconstructedChainedLogsHash, hashedLog)); } - require( - reconstructedChainedLogsHash == chainedLogsHash, - "reconstructedChainedLogsHash is not equal to chainedLogsHash" - ); + if (reconstructedChainedLogsHash != chainedLogsHash) { + revert ReconstructionMismatch(PubdataField.LogsHash, chainedLogsHash, reconstructedChainedLogsHash); + } for (uint256 i = numberOfL2ToL1Logs; i < L2_TO_L1_LOGS_MERKLE_TREE_LEAVES; ++i) { l2ToL1LogsTreeArray[i] = L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH; } @@ -227,101 +303,40 @@ contract L1Messenger is IL1Messenger, ISystemContract { ); } } - bytes32 l2ToL1LogsTreeRoot = l2ToL1LogsTreeArray[0]; + bytes32 localLogsRootHash = l2ToL1LogsTreeArray[0]; - /// Check messages - uint32 numberOfMessages = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); - calldataPtr += 4; - bytes32 reconstructedChainedMessagesHash; - for (uint256 i = 0; i < numberOfMessages; ++i) { - uint32 currentMessageLength = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); - calldataPtr += 4; - bytes32 hashedMessage = EfficientCall.keccak( - _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + currentMessageLength] - ); - calldataPtr += currentMessageLength; - reconstructedChainedMessagesHash = keccak256(abi.encode(reconstructedChainedMessagesHash, hashedMessage)); - } - require( - reconstructedChainedMessagesHash == chainedMessagesHash, - "reconstructedChainedMessagesHash is not equal to chainedMessagesHash" - ); + bytes32 aggregatedRootHash = L2_MESSAGE_ROOT.getAggregatedRoot(); + bytes32 fullRootHash = keccak256(bytes.concat(localLogsRootHash, aggregatedRootHash)); - /// Check bytecodes - uint32 numberOfBytecodes = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); - calldataPtr += 4; - bytes32 reconstructedChainedL1BytecodesRevealDataHash; - for (uint256 i = 0; i < 
numberOfBytecodes; ++i) { - uint32 currentBytecodeLength = uint32( - bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]) - ); - calldataPtr += 4; - reconstructedChainedL1BytecodesRevealDataHash = keccak256( - abi.encode( - reconstructedChainedL1BytecodesRevealDataHash, - Utils.hashL2Bytecode( - _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + currentBytecodeLength] - ) - ) - ); - calldataPtr += currentBytecodeLength; + if (inputChainedLogsRootHash != localLogsRootHash) { + revert ReconstructionMismatch(PubdataField.InputLogsRootHash, localLogsRootHash, inputChainedLogsRootHash); } - require( - reconstructedChainedL1BytecodesRevealDataHash == chainedL1BytecodesRevealDataHash, - "reconstructedChainedL1BytecodesRevealDataHash is not equal to chainedL1BytecodesRevealDataHash" - ); - - /// Check State Diffs - /// encoding is as follows: - /// header (1 byte version, 3 bytes total len of compressed, 1 byte enumeration index size) - /// body (`compressedStateDiffSize` bytes, 4 bytes number of state diffs, `numberOfStateDiffs` * `STATE_DIFF_ENTRY_SIZE` bytes for the uncompressed state diffs) - /// encoded state diffs: [20bytes address][32bytes key][32bytes derived key][8bytes enum index][32bytes initial value][32bytes final value] - require( - uint256(uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr]))) == - STATE_DIFF_COMPRESSION_VERSION_NUMBER, - "state diff compression version mismatch" - ); - calldataPtr++; - - uint24 compressedStateDiffSize = uint24(bytes3(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 3])); - calldataPtr += 3; - - uint8 enumerationIndexSize = uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr])); - calldataPtr++; - - bytes calldata compressedStateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + - compressedStateDiffSize]; - calldataPtr += compressedStateDiffSize; - bytes calldata totalL2ToL1Pubdata = _totalL2ToL1PubdataAndStateDiffs[:calldataPtr]; - - uint32 numberOfStateDiffs 
= uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); - calldataPtr += 4; - - bytes calldata stateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + - (numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE)]; - calldataPtr += numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; - - bytes32 stateDiffHash = COMPRESSOR_CONTRACT.verifyCompressedStateDiffs( - numberOfStateDiffs, - enumerationIndexSize, - stateDiffs, - compressedStateDiffs - ); - - /// Check for calldata strict format - require(calldataPtr == _totalL2ToL1PubdataAndStateDiffs.length, "Extra data in the totalL2ToL1Pubdata array"); - - PUBDATA_CHUNK_PUBLISHER.chunkAndPublishPubdata(totalL2ToL1Pubdata); + bytes32 l2DAValidatorOutputhash = bytes32(0); + if (_l2DAValidator != address(0)) { + bytes memory returnData = EfficientCall.call({ + _gas: gasleft(), + _address: _l2DAValidator, + _value: 0, + _data: _operatorInput, + _isSystem: false + }); + + l2DAValidatorOutputhash = abi.decode(returnData, (bytes32)); + } /// Native (VM) L2 to L1 log - SystemContractHelper.toL1(true, bytes32(uint256(SystemLogKey.L2_TO_L1_LOGS_TREE_ROOT_KEY)), l2ToL1LogsTreeRoot); + SystemContractHelper.toL1(true, bytes32(uint256(SystemLogKey.L2_TO_L1_LOGS_TREE_ROOT_KEY)), fullRootHash); + SystemContractHelper.toL1( + true, + bytes32(uint256(SystemLogKey.USED_L2_DA_VALIDATOR_ADDRESS_KEY)), + bytes32(uint256(uint160(_l2DAValidator))) + ); SystemContractHelper.toL1( true, - bytes32(uint256(SystemLogKey.TOTAL_L2_TO_L1_PUBDATA_KEY)), - EfficientCall.keccak(totalL2ToL1Pubdata) + bytes32(uint256(SystemLogKey.L2_DA_VALIDATOR_OUTPUT_HASH_KEY)), + l2DAValidatorOutputhash ); - SystemContractHelper.toL1(true, bytes32(uint256(SystemLogKey.STATE_DIFF_HASH_KEY)), stateDiffHash); /// Clear logs state chainedLogsHash = bytes32(0); diff --git a/system-contracts/contracts/L2BaseToken.sol b/system-contracts/contracts/L2BaseToken.sol index 8101c638b..9f826a80b 100644 --- a/system-contracts/contracts/L2BaseToken.sol +++ 
b/system-contracts/contracts/L2BaseToken.sol @@ -1,11 +1,12 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IBaseToken} from "./interfaces/IBaseToken.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; import {MSG_VALUE_SYSTEM_CONTRACT, DEPLOYER_SYSTEM_CONTRACT, BOOTLOADER_FORMAL_ADDRESS, L1_MESSENGER_CONTRACT} from "./Constants.sol"; import {IMailbox} from "./interfaces/IMailbox.sol"; +import {Unauthorized, InsufficientFunds} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -15,7 +16,7 @@ import {IMailbox} from "./interfaces/IMailbox.sol"; * Instead, this contract is used by the bootloader and `MsgValueSimulator`/`ContractDeployer` system contracts * to perform the balance changes while simulating the `msg.value` Ethereum behavior. */ -contract L2BaseToken is IBaseToken, ISystemContract { +contract L2BaseToken is IBaseToken, SystemContractBase { /// @notice The balances of the users. mapping(address account => uint256 balance) internal balance; @@ -30,15 +31,18 @@ contract L2BaseToken is IBaseToken, ISystemContract { /// @dev This function also emits "Transfer" event, which might be removed /// later on. 
function transferFromTo(address _from, address _to, uint256 _amount) external override { - require( - msg.sender == MSG_VALUE_SYSTEM_CONTRACT || - msg.sender == address(DEPLOYER_SYSTEM_CONTRACT) || - msg.sender == BOOTLOADER_FORMAL_ADDRESS, - "Only system contracts with special access can call this method" - ); + if ( + msg.sender != MSG_VALUE_SYSTEM_CONTRACT && + msg.sender != address(DEPLOYER_SYSTEM_CONTRACT) && + msg.sender != BOOTLOADER_FORMAL_ADDRESS + ) { + revert Unauthorized(msg.sender); + } uint256 fromBalance = balance[_from]; - require(fromBalance >= _amount, "Transfer amount exceeds balance"); + if (fromBalance < _amount) { + revert InsufficientFunds(_amount, fromBalance); + } unchecked { balance[_from] = fromBalance - _amount; // Overflow not possible: the sum of all balances is capped by totalSupply, and the sum is preserved by @@ -82,7 +86,7 @@ contract L2BaseToken is IBaseToken, ISystemContract { /// @notice Initiate the withdrawal of the base token, with the sent message. The funds will be available to claim on L1 `finalizeEthWithdrawal` method. /// @param _l1Receiver The address on L1 to receive the funds. /// @param _additionalData Additional data to be sent to L1 with the withdrawal. 
- function withdrawWithMessage(address _l1Receiver, bytes memory _additionalData) external payable override { + function withdrawWithMessage(address _l1Receiver, bytes calldata _additionalData) external payable override { uint256 amount = _burnMsgValue(); // Send the L2 log, a user could use it as proof of the withdrawal diff --git a/system-contracts/contracts/L2GatewayUpgrade.sol b/system-contracts/contracts/L2GatewayUpgrade.sol new file mode 100644 index 000000000..bef6009ae --- /dev/null +++ b/system-contracts/contracts/L2GatewayUpgrade.sol @@ -0,0 +1,78 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {DEPLOYER_SYSTEM_CONTRACT} from "./Constants.sol"; +import {IContractDeployer, ForceDeployment} from "./interfaces/IContractDeployer.sol"; +import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; +import {FixedForceDeploymentsData, ZKChainSpecificForceDeploymentsData} from "./interfaces/IL2GenesisUpgrade.sol"; + +import {L2GenesisUpgradeHelper} from "./L2GenesisUpgradeHelper.sol"; +import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts-v4/proxy/transparent/TransparentUpgradeableProxy.sol"; +import {IL2SharedBridgeLegacy} from "./interfaces/IL2SharedBridgeLegacy.sol"; +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; + +/// @custom:security-contact security@matterlabs.dev +/// @author Matter Labs +/// @notice The contract that is used for facilitating the upgrade of the L2 +/// to the protocol version that supports gateway +/// @dev This contract is neither predeployed nor a system contract. It is located +/// in this folder due to very overlapping functionality with `L2GenesisUpgrade` and +/// facilitating reusage of the code. +/// @dev During the upgrade, it will be delegate-called by the `ComplexUpgrader` contract. 
+contract L2GatewayUpgrade { + function upgrade( + ForceDeployment[] calldata _forceDeployments, + address _ctmDeployer, + bytes calldata _fixedForceDeploymentsData, + bytes calldata _additionalForceDeploymentsData + ) external payable { + // Firstly, we force deploy the main set of contracts. + // Those will be deployed without any contract invocation. + IContractDeployer(DEPLOYER_SYSTEM_CONTRACT).forceDeployOnAddresses{value: msg.value}(_forceDeployments); + + // Secondly, we perform the more complex deployment of the gateway contracts. + L2GenesisUpgradeHelper.performForceDeployedContractsInit( + _ctmDeployer, + _fixedForceDeploymentsData, + _additionalForceDeploymentsData + ); + + ZKChainSpecificForceDeploymentsData memory additionalForceDeploymentsData = abi.decode( + _additionalForceDeploymentsData, + (ZKChainSpecificForceDeploymentsData) + ); + + address l2LegacyBridgeAddress = additionalForceDeploymentsData.l2LegacySharedBridge; + + if (l2LegacyBridgeAddress != address(0)) { + FixedForceDeploymentsData memory fixedForceDeploymentsData = abi.decode( + _fixedForceDeploymentsData, + (FixedForceDeploymentsData) + ); + + // Firstly, upgrade the legacy L2SharedBridge + bytes memory bridgeUpgradeData = abi.encodeCall( + ITransparentUpgradeableProxy.upgradeTo, + (fixedForceDeploymentsData.l2SharedBridgeLegacyImpl) + ); + SystemContractHelper.mimicCallWithPropagatedRevert( + l2LegacyBridgeAddress, + fixedForceDeploymentsData.l2BridgeProxyOwnerAddress, + bridgeUpgradeData + ); + + // Secondly, upgrade the tokens + UpgradeableBeacon upgradableBeacon = IL2SharedBridgeLegacy(l2LegacyBridgeAddress).l2TokenBeacon(); + bytes memory beaconUpgradeData = abi.encodeCall( + UpgradeableBeacon.upgradeTo, + (fixedForceDeploymentsData.l2BridgedStandardERC20Impl) + ); + SystemContractHelper.mimicCallWithPropagatedRevert( + address(upgradableBeacon), + fixedForceDeploymentsData.l2BridgedStandardERC20ProxyOwnerAddress, + beaconUpgradeData + ); + } + } +} diff --git 
a/system-contracts/contracts/L2GenesisUpgrade.sol b/system-contracts/contracts/L2GenesisUpgrade.sol new file mode 100644 index 000000000..585ff14f1 --- /dev/null +++ b/system-contracts/contracts/L2GenesisUpgrade.sol @@ -0,0 +1,33 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {SYSTEM_CONTEXT_CONTRACT} from "./Constants.sol"; +import {ISystemContext} from "./interfaces/ISystemContext.sol"; +import {IL2GenesisUpgrade} from "./interfaces/IL2GenesisUpgrade.sol"; + +import {L2GenesisUpgradeHelper} from "./L2GenesisUpgradeHelper.sol"; + +/// @custom:security-contact security@matterlabs.dev +/// @author Matter Labs +/// @notice The contract that can be used for deterministic contract deployment. +contract L2GenesisUpgrade is IL2GenesisUpgrade { + function genesisUpgrade( + uint256 _chainId, + address _ctmDeployer, + bytes calldata _fixedForceDeploymentsData, + bytes calldata _additionalForceDeploymentsData + ) external payable { + // solhint-disable-next-line gas-custom-errors + require(_chainId != 0, "Invalid chainId"); + ISystemContext(SYSTEM_CONTEXT_CONTRACT).setChainId(_chainId); + + L2GenesisUpgradeHelper.performForceDeployedContractsInit( + _ctmDeployer, + _fixedForceDeploymentsData, + _additionalForceDeploymentsData + ); + + emit UpgradeComplete(_chainId); + } +} diff --git a/system-contracts/contracts/L2GenesisUpgradeHelper.sol b/system-contracts/contracts/L2GenesisUpgradeHelper.sol new file mode 100644 index 000000000..30752c948 --- /dev/null +++ b/system-contracts/contracts/L2GenesisUpgradeHelper.sol @@ -0,0 +1,117 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +import {DEPLOYER_SYSTEM_CONTRACT, L2_BRIDGE_HUB, L2_ASSET_ROUTER, L2_MESSAGE_ROOT, L2_NATIVE_TOKEN_VAULT_ADDR} from "./Constants.sol"; +import {IContractDeployer, ForceDeployment} from "./interfaces/IContractDeployer.sol"; +import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; +import {FixedForceDeploymentsData, 
ZKChainSpecificForceDeploymentsData} from "./interfaces/IL2GenesisUpgrade.sol"; + +library L2GenesisUpgradeHelper { + function performForceDeployedContractsInit( + address _ctmDeployer, + bytes calldata _fixedForceDeploymentsData, + bytes calldata _additionalForceDeploymentsData + ) internal { + ForceDeployment[] memory forceDeployments = _getForceDeploymentsData( + _fixedForceDeploymentsData, + _additionalForceDeploymentsData + ); + IContractDeployer(DEPLOYER_SYSTEM_CONTRACT).forceDeployOnAddresses{value: msg.value}(forceDeployments); + + // It is expected that either via to the force deployments above + // or upon init both the L2 deployment of Bridgehub, AssetRouter and MessageRoot are deployed. + // (The comment does not mention the exact order in case it changes) + // However, there is still some follow up finalization that needs to be done. + + address bridgehubOwner = L2_BRIDGE_HUB.owner(); + + bytes memory data = abi.encodeCall( + L2_BRIDGE_HUB.setAddresses, + (L2_ASSET_ROUTER, _ctmDeployer, address(L2_MESSAGE_ROOT)) + ); + + (bool success, bytes memory returnData) = SystemContractHelper.mimicCall( + address(L2_BRIDGE_HUB), + bridgehubOwner, + data + ); + if (!success) { + // Progapatate revert reason + assembly { + revert(add(returnData, 0x20), returndatasize()) + } + } + } + + function _getForceDeploymentsData( + bytes calldata _fixedForceDeploymentsData, + bytes calldata _additionalForceDeploymentsData + ) internal view returns (ForceDeployment[] memory forceDeployments) { + FixedForceDeploymentsData memory fixedForceDeploymentsData = abi.decode( + _fixedForceDeploymentsData, + (FixedForceDeploymentsData) + ); + ZKChainSpecificForceDeploymentsData memory additionalForceDeploymentsData = abi.decode( + _additionalForceDeploymentsData, + (ZKChainSpecificForceDeploymentsData) + ); + + forceDeployments = new ForceDeployment[](4); + + forceDeployments[0] = ForceDeployment({ + bytecodeHash: fixedForceDeploymentsData.messageRootBytecodeHash, + newAddress: 
address(L2_MESSAGE_ROOT), + callConstructor: true, + value: 0, + // solhint-disable-next-line func-named-parameters + input: abi.encode(address(L2_BRIDGE_HUB)) + }); + + forceDeployments[1] = ForceDeployment({ + bytecodeHash: fixedForceDeploymentsData.bridgehubBytecodeHash, + newAddress: address(L2_BRIDGE_HUB), + callConstructor: true, + value: 0, + input: abi.encode( + fixedForceDeploymentsData.l1ChainId, + fixedForceDeploymentsData.aliasedL1Governance, + fixedForceDeploymentsData.maxNumberOfZKChains + ) + }); + + forceDeployments[2] = ForceDeployment({ + bytecodeHash: fixedForceDeploymentsData.l2AssetRouterBytecodeHash, + newAddress: address(L2_ASSET_ROUTER), + callConstructor: true, + value: 0, + // solhint-disable-next-line func-named-parameters + input: abi.encode( + fixedForceDeploymentsData.l1ChainId, + fixedForceDeploymentsData.eraChainId, + fixedForceDeploymentsData.l1AssetRouter, + additionalForceDeploymentsData.l2LegacySharedBridge, + additionalForceDeploymentsData.baseTokenAssetId, + fixedForceDeploymentsData.aliasedL1Governance + ) + }); + + forceDeployments[3] = ForceDeployment({ + bytecodeHash: fixedForceDeploymentsData.l2NtvBytecodeHash, + newAddress: L2_NATIVE_TOKEN_VAULT_ADDR, + callConstructor: true, + value: 0, + // solhint-disable-next-line func-named-parameters + input: abi.encode( + fixedForceDeploymentsData.l1ChainId, + fixedForceDeploymentsData.aliasedL1Governance, + fixedForceDeploymentsData.l2TokenProxyBytecodeHash, + additionalForceDeploymentsData.l2LegacySharedBridge, + address(0), // this is used if the contract were already deployed, so for the migration of Era. 
+ false, + additionalForceDeploymentsData.l2Weth, + additionalForceDeploymentsData.baseTokenAssetId + ) + }); + } +} diff --git a/system-contracts/contracts/MsgValueSimulator.sol b/system-contracts/contracts/MsgValueSimulator.sol index c1dcde694..5fcd0f2d9 100644 --- a/system-contracts/contracts/MsgValueSimulator.sol +++ b/system-contracts/contracts/MsgValueSimulator.sol @@ -1,12 +1,13 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {Utils} from "./libraries/Utils.sol"; import {EfficientCall} from "./libraries/EfficientCall.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; import {MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, REAL_BASE_TOKEN_SYSTEM_CONTRACT} from "./Constants.sol"; +import {InvalidCall} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -16,7 +17,7 @@ import {MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, REAL_BASE_TOKEN_SYSTEM_CONTRACT} from * the address to call in the second extraAbi param, transfers the funds and uses `mimicCall` to continue the * call with the same msg.sender. */ -contract MsgValueSimulator is ISystemContract { +contract MsgValueSimulator is SystemContractBase { /// @notice Extract value, isSystemCall and to from the extraAbi params. /// @dev The contract accepts value, the callee and whether the call should be a system one via its ABI params. /// @dev The first ABI param contains the value in the [0..127] bits. The 128th contains @@ -33,17 +34,18 @@ contract MsgValueSimulator is ISystemContract { } /// @notice The maximal number of gas out of the stipend that should be passed to the callee. - uint256 constant GAS_TO_PASS = 2300; + uint256 private constant GAS_TO_PASS = 2300; /// @notice The amount of gas that is passed to the MsgValueSimulator as a stipend. 
/// This number servers to pay for the ETH transfer as well as to provide gas for the `GAS_TO_PASS` gas. /// It is equal to the following constant: https://github.com/matter-labs/era-zkevm_opcode_defs/blob/7bf8016f5bb13a73289f321ad6ea8f614540ece9/src/system_params.rs#L96. - uint256 constant MSG_VALUE_SIMULATOR_STIPEND_GAS = 27000; + uint256 private constant MSG_VALUE_SIMULATOR_STIPEND_GAS = 27000; /// @notice The fallback function that is the main entry point for the MsgValueSimulator. /// @dev The contract accepts value, the callee and whether the call should be a system one via its ABI params. /// @param _data The calldata to be passed to the callee. /// @return The return data from the callee. + // solhint-disable-next-line payable-fallback fallback(bytes calldata _data) external onlySystemCall returns (bytes memory) { // Firstly we calculate how much gas has been actually provided by the user to the inner call. // For that, we need to get the total gas available in this context and subtract the stipend from it. @@ -57,7 +59,9 @@ contract MsgValueSimulator is ISystemContract { (uint256 value, bool isSystemCall, address to) = _getAbiParams(); // Prevent mimic call to the MsgValueSimulator to prevent an unexpected change of callee. 
- require(to != address(this), "MsgValueSimulator calls itself"); + if (to == address(this)) { + revert InvalidCall(); + } if (value != 0) { (bool success, ) = address(REAL_BASE_TOKEN_SYSTEM_CONTRACT).call( diff --git a/system-contracts/contracts/NonceHolder.sol b/system-contracts/contracts/NonceHolder.sol index 12abda8bd..cca07b1b4 100644 --- a/system-contracts/contracts/NonceHolder.sol +++ b/system-contracts/contracts/NonceHolder.sol @@ -1,11 +1,12 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {INonceHolder} from "./interfaces/INonceHolder.sol"; import {IContractDeployer} from "./interfaces/IContractDeployer.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; import {DEPLOYER_SYSTEM_CONTRACT} from "./Constants.sol"; +import {NonceIncreaseError, ZeroNonceError, NonceJumpError, ValueMismatch, NonceAlreadyUsed, NonceNotUsed, Unauthorized} from "./SystemContractErrors.sol"; /** * @author Matter Labs @@ -24,7 +25,7 @@ import {DEPLOYER_SYSTEM_CONTRACT} from "./Constants.sol"; * @dev The behavior of some of the methods depends on the nonce ordering of the account. Nonce ordering is a mere suggestion and all the checks that are present * here serve more as a help to users to prevent from doing mistakes, rather than any invariants. */ -contract NonceHolder is INonceHolder, ISystemContract { +contract NonceHolder is INonceHolder, SystemContractBase { uint256 private constant DEPLOY_NONCE_MULTIPLIER = 2 ** 128; /// The minNonce can be increased by 2^32 at a time to prevent it from /// overflowing beyond 2**128. @@ -63,7 +64,9 @@ contract NonceHolder is INonceHolder, ISystemContract { /// @param _value The number by which to increase the minimal nonce for msg.sender. /// @return oldMinNonce The value of the minimal nonce for msg.sender before the increase. 
function increaseMinNonce(uint256 _value) public onlySystemCall returns (uint256 oldMinNonce) { - require(_value <= MAXIMAL_MIN_NONCE_INCREMENT, "The value for incrementing the nonce is too high"); + if (_value > MAXIMAL_MIN_NONCE_INCREMENT) { + revert NonceIncreaseError(MAXIMAL_MIN_NONCE_INCREMENT, _value); + } uint256 addressAsKey = uint256(uint160(msg.sender)); uint256 oldRawNonce = rawNonces[addressAsKey]; @@ -82,11 +85,15 @@ contract NonceHolder is INonceHolder, ISystemContract { function setValueUnderNonce(uint256 _key, uint256 _value) public onlySystemCall { IContractDeployer.AccountInfo memory accountInfo = DEPLOYER_SYSTEM_CONTRACT.getAccountInfo(msg.sender); - require(_value != 0, "Nonce value cannot be set to 0"); + if (_value == 0) { + revert ZeroNonceError(); + } // If an account has sequential nonce ordering, we enforce that the previous // nonce has already been used. if (accountInfo.nonceOrdering == IContractDeployer.AccountNonceOrdering.Sequential && _key != 0) { - require(isNonceUsed(msg.sender, _key - 1), "Previous nonce has not been used"); + if (!isNonceUsed(msg.sender, _key - 1)) { + revert NonceJumpError(); + } } uint256 addressAsKey = uint256(uint160(msg.sender)); @@ -112,7 +119,9 @@ contract NonceHolder is INonceHolder, ISystemContract { uint256 oldRawNonce = rawNonces[addressAsKey]; (, uint256 oldMinNonce) = _splitRawNonce(oldRawNonce); - require(oldMinNonce == _expectedNonce, "Incorrect nonce"); + if (oldMinNonce != _expectedNonce) { + revert ValueMismatch(_expectedNonce, oldMinNonce); + } unchecked { rawNonces[addressAsKey] = oldRawNonce + 1; @@ -133,10 +142,9 @@ contract NonceHolder is INonceHolder, ISystemContract { /// @param _address The address of the account which to return the deploy nonce for. /// @return prevDeploymentNonce The deployment nonce at the time this function is called. 
function incrementDeploymentNonce(address _address) external returns (uint256 prevDeploymentNonce) { - require( - msg.sender == address(DEPLOYER_SYSTEM_CONTRACT), - "Only the contract deployer can increment the deployment nonce" - ); + if (msg.sender != address(DEPLOYER_SYSTEM_CONTRACT)) { + revert Unauthorized(msg.sender); + } uint256 addressAsKey = uint256(uint160(_address)); uint256 oldRawNonce = rawNonces[addressAsKey]; @@ -167,9 +175,9 @@ contract NonceHolder is INonceHolder, ISystemContract { bool isUsed = isNonceUsed(_address, _key); if (isUsed && !_shouldBeUsed) { - revert("Reusing the same nonce twice"); + revert NonceAlreadyUsed(_address, _key); } else if (!isUsed && _shouldBeUsed) { - revert("The nonce was not set as used"); + revert NonceNotUsed(_address, _key); } } diff --git a/system-contracts/contracts/PubdataChunkPublisher.sol b/system-contracts/contracts/PubdataChunkPublisher.sol index 53c265e9b..f61f0b5ac 100644 --- a/system-contracts/contracts/PubdataChunkPublisher.sol +++ b/system-contracts/contracts/PubdataChunkPublisher.sol @@ -1,29 +1,33 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {IPubdataChunkPublisher} from "./interfaces/IPubdataChunkPublisher.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; -import {L1_MESSENGER_CONTRACT, BLOB_SIZE_BYTES, MAX_NUMBER_OF_BLOBS, SystemLogKey} from "./Constants.sol"; -import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; +import {BLOB_SIZE_BYTES, MAX_NUMBER_OF_BLOBS} from "./Constants.sol"; +import {TooMuchPubdata} from "./SystemContractErrors.sol"; /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev * @notice Smart contract for chunking pubdata into the appropriate size for EIP-4844 blobs. 
*/ -contract PubdataChunkPublisher is IPubdataChunkPublisher, ISystemContract { +contract PubdataChunkPublisher is IPubdataChunkPublisher, SystemContractBase { /// @notice Chunks pubdata into pieces that can fit into blobs. /// @param _pubdata The total l2 to l1 pubdata that will be sent via L1 blobs. /// @dev Note: This is an early implementation, in the future we plan to support up to 16 blobs per l1 batch. - /// @dev We always publish 6 system logs even if our pubdata fits into a single blob. This makes processing logs on L1 easier. - function chunkAndPublishPubdata(bytes calldata _pubdata) external onlyCallFrom(address(L1_MESSENGER_CONTRACT)) { - require(_pubdata.length <= BLOB_SIZE_BYTES * MAX_NUMBER_OF_BLOBS, "pubdata should fit in 6 blobs"); + function chunkPubdataToBlobs(bytes calldata _pubdata) external pure returns (bytes32[] memory blobLinearHashes) { + if (_pubdata.length > BLOB_SIZE_BYTES * MAX_NUMBER_OF_BLOBS) { + revert TooMuchPubdata(BLOB_SIZE_BYTES * MAX_NUMBER_OF_BLOBS, _pubdata.length); + } + + // `+BLOB_SIZE_BYTES-1` is used to round up the division. + uint256 blobCount = (_pubdata.length + BLOB_SIZE_BYTES - 1) / BLOB_SIZE_BYTES; - bytes32[] memory blobHashes = new bytes32[](MAX_NUMBER_OF_BLOBS); + blobLinearHashes = new bytes32[](blobCount); - // We allocate to the full size of MAX_NUMBER_OF_BLOBS * BLOB_SIZE_BYTES because we need to pad + // We allocate to the full size of blobCount * BLOB_SIZE_BYTES because we need to pad // the data on the right with 0s if it doesn't take up the full blob - bytes memory totalBlobs = new bytes(BLOB_SIZE_BYTES * MAX_NUMBER_OF_BLOBS); + bytes memory totalBlobs = new bytes(BLOB_SIZE_BYTES * blobCount); assembly { // The pointer to the allocated memory above. We skip 32 bytes to avoid overwriting the length. 
@@ -31,15 +35,9 @@ contract PubdataChunkPublisher is IPubdataChunkPublisher, ISystemContract { calldatacopy(ptr, _pubdata.offset, _pubdata.length) } - for (uint256 i = 0; i < MAX_NUMBER_OF_BLOBS; i++) { + for (uint256 i = 0; i < blobCount; ++i) { uint256 start = BLOB_SIZE_BYTES * i; - // We break if the pubdata isn't enough to cover all 6 blobs. On L1 it is expected that the hash - // will be bytes32(0) if a blob isn't going to be used. - if (start >= _pubdata.length) { - break; - } - bytes32 blobHash; assembly { // The pointer to the allocated memory above skipping the length. @@ -47,15 +45,7 @@ contract PubdataChunkPublisher is IPubdataChunkPublisher, ISystemContract { blobHash := keccak256(add(ptr, start), BLOB_SIZE_BYTES) } - blobHashes[i] = blobHash; - } - - for (uint8 i = 0; i < MAX_NUMBER_OF_BLOBS; i++) { - SystemContractHelper.toL1( - true, - bytes32(uint256(SystemLogKey(i + uint256(SystemLogKey.BLOB_ONE_HASH_KEY)))), - blobHashes[i] - ); + blobLinearHashes[i] = blobHash; } } } diff --git a/system-contracts/contracts/SystemContext.sol b/system-contracts/contracts/SystemContext.sol index 51b9633d9..18a2dc143 100644 --- a/system-contracts/contracts/SystemContext.sol +++ b/system-contracts/contracts/SystemContext.sol @@ -1,12 +1,14 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +// solhint-disable reason-string, gas-custom-errors + +pragma solidity 0.8.24; import {ISystemContext} from "./interfaces/ISystemContext.sol"; -import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractBase} from "./abstract/SystemContractBase.sol"; import {ISystemContextDeprecated} from "./interfaces/ISystemContextDeprecated.sol"; import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; -import {BOOTLOADER_FORMAL_ADDRESS, SystemLogKey} from "./Constants.sol"; +import {BOOTLOADER_FORMAL_ADDRESS, SystemLogKey, COMPLEX_UPGRADER_CONTRACT} from "./Constants.sol"; /** * @author Matter Labs @@ -14,7 +16,7 @@ import 
{BOOTLOADER_FORMAL_ADDRESS, SystemLogKey} from "./Constants.sol"; * @notice Contract that stores some of the context variables, that may be either * block-scoped, tx-scoped or system-wide. */ -contract SystemContext is ISystemContext, ISystemContextDeprecated, ISystemContract { +contract SystemContext is ISystemContext, ISystemContextDeprecated, SystemContractBase { /// @notice The number of latest L2 blocks to store. /// @dev EVM requires us to be able to query the hashes of previous 256 blocks. /// We could either: @@ -83,7 +85,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated, ISystemContr /// @notice Set the chainId origin. /// @param _newChainId The chainId - function setChainId(uint256 _newChainId) external onlyCallFromForceDeployer { + function setChainId(uint256 _newChainId) external onlyCallFrom(address(COMPLEX_UPGRADER_CONTRACT)) { chainId = _newChainId; } @@ -289,6 +291,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated, ISystemContr virtualBlockUpgradeInfo.virtualBlockStartBatch = currentBatchNumber; require(_maxVirtualBlocksToCreate > 0, "Can't initialize the first virtual block"); + // solhint-disable-next-line gas-increment-by-one _maxVirtualBlocksToCreate -= 1; } else if (_maxVirtualBlocksToCreate == 0) { // The virtual blocks have been already initialized, but the operator didn't ask to create @@ -484,7 +487,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated, ISystemContr } function incrementTxNumberInBatch() external onlyCallFromBootloader { - txNumberInBlock += 1; + ++txNumberInBlock; } function resetTxNumberInBatch() external onlyCallFromBootloader { diff --git a/system-contracts/contracts/SystemContractErrors.sol b/system-contracts/contracts/SystemContractErrors.sol new file mode 100644 index 000000000..d61e99c10 --- /dev/null +++ b/system-contracts/contracts/SystemContractErrors.sol @@ -0,0 +1,147 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it 
can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; + +// 0x86bb51b8 +error AddressHasNoCode(address); +// 0xefce78c7 +error CallerMustBeBootloader(); +// 0xb7549616 +error CallerMustBeForceDeployer(); +// 0x9eedbd2b +error CallerMustBeSystemContract(); +// 0x4f951510 +error CompressionValueAddError(uint256 expected, uint256 actual); +// 0x1e6aff87 +error CompressionValueTransformError(uint256 expected, uint256 actual); +// 0xc2ea251e +error CompressionValueSubError(uint256 expected, uint256 actual); +// 0x849acb7f +error CompressorInitialWritesProcessedNotEqual(uint256 expected, uint256 actual); +// 0x61a6a4b3 +error CompressorEnumIndexNotEqual(uint256 expected, uint256 actual); +// 0x9be48d8d +error DerivedKeyNotEqualToCompressedValue(bytes32 expected, bytes32 provided); +// 0xe223db5e +error DictionaryDividedByEightNotGreaterThanEncodedDividedByTwo(); +// 0x1c25715b +error EmptyBytes32(); +// 0x92bf3cf8 +error EmptyVirtualBlocks(); +// 0xc06d5cb2 +error EncodedAndRealBytecodeChunkNotEqual(uint64 expected, uint64 provided); +// 0x2bfbfc11 +error EncodedLengthNotFourTimesSmallerThanOriginal(); +// 0xe95a1fbe +error FailedToChargeGas(); +// 0x1f70c58f +error FailedToPayOperator(); +// 0x9d5da395 +error FirstL2BlockInitializationError(); +// 0x9e4a3c8a +error HashIsNonZero(bytes32); +// 0x86302004 +error HashMismatch(bytes32 expected, uint256 actual); +// 0x4e23d035 +error IndexOutOfBounds(); +// 0x122e73e9 +error IndexSizeError(); +// 0x03eb8b54 +error InsufficientFunds(uint256 required, uint256 actual); +// 0x1c26714c +error InsufficientGas(); +// 0xae962d4e +error InvalidCall(); +// 0x6a84bc39 +error InvalidCodeHash(CodeHashReason); +// 0xb4fa3fb3 +error InvalidInput(); +// 0x60b85677 +error InvalidNonceOrderingChange(); +// 0x90f049c9 +error InvalidSig(SigField, uint256); +// 0xf4a271b5 +error Keccak256InvalidReturnData(); +// 0xd2906dd9 +error L2BlockMustBeGreaterThanZero(); 
+// 0x43e266b0 +error MalformedBytecode(BytecodeError); +// 0xe90aded4 +error NonceAlreadyUsed(address account, uint256 nonce); +// 0x45ac24a6 +error NonceIncreaseError(uint256 max, uint256 proposed); +// 0x13595475 +error NonceJumpError(); +// 0x1f2f8478 +error NonceNotUsed(address account, uint256 nonce); +// 0x760a1568 +error NonEmptyAccount(); +// 0x536ec84b +error NonEmptyMsgValue(); +// 0xd018e08e +error NonIncreasingTimestamp(); +// 0x50df6bc3 +error NotAllowedToDeployInKernelSpace(); +// 0x35278d12 +error Overflow(); +// 0x7f7b0cf7 +error ReconstructionMismatch(PubdataField, bytes32 expected, bytes32 actual); +// 0x3adb5f1d +error ShaInvalidReturnData(); +// 0xbd8665e2 +error StateDiffLengthMismatch(); +// 0x71c3da01 +error SystemCallFlagRequired(); +// 0xe0456dfe +error TooMuchPubdata(uint256 limit, uint256 supplied); +// 0x8e4a23d6 +error Unauthorized(address); +// 0x3e5efef9 +error UnknownCodeHash(bytes32); +// 0x9ba6061b +error UnsupportedOperation(); +// 0xff15b069 +error UnsupportedPaymasterFlow(); +// 0x17a84415 +error UnsupportedTxType(uint256); +// 0x5708aead +error UpgradeMustBeFirstTxn(); +// 0x626ade30 +error ValueMismatch(uint256 expected, uint256 actual); +// 0x460b9939 +error ValuesNotEqual(uint256 expected, uint256 actual); +// 0x6818f3f9 +error ZeroNonceError(); + +enum CodeHashReason { + NotContractOnConstructor, + NotConstructedContract +} + +enum SigField { + Length, + V, + S +} + +enum PubdataField { + NumberOfLogs, + LogsHash, + MsgHash, + Bytecode, + InputDAFunctionSig, + InputLogsHash, + InputLogsRootHash, + InputMsgsHash, + InputBytecodeHash, + Offset, + Length +} + +enum BytecodeError { + Version, + NumberOfWords, + Length, + WordsMustBeOdd, + DictionaryLength +} diff --git a/system-contracts/contracts/interfaces/ISystemContract.sol b/system-contracts/contracts/abstract/SystemContractBase.sol similarity index 57% rename from system-contracts/contracts/interfaces/ISystemContract.sol rename to 
system-contracts/contracts/abstract/SystemContractBase.sol index 01ff9d95f..b0bdc36b5 100644 --- a/system-contracts/contracts/interfaces/ISystemContract.sol +++ b/system-contracts/contracts/abstract/SystemContractBase.sol @@ -1,9 +1,10 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; import {SystemContractHelper} from "../libraries/SystemContractHelper.sol"; -import {BOOTLOADER_FORMAL_ADDRESS, FORCE_DEPLOYER} from "../Constants.sol"; +import {BOOTLOADER_FORMAL_ADDRESS} from "../Constants.sol"; +import {SystemCallFlagRequired, Unauthorized, CallerMustBeSystemContract, CallerMustBeBootloader} from "../SystemContractErrors.sol"; /** * @author Matter Labs @@ -14,45 +15,40 @@ import {BOOTLOADER_FORMAL_ADDRESS, FORCE_DEPLOYER} from "../Constants.sol"; * @dev Never add storage variables into this contract as some * system contracts rely on this abstract contract as on interface! */ -abstract contract ISystemContract { +abstract contract SystemContractBase { /// @notice Modifier that makes sure that the method /// can only be called via a system call. modifier onlySystemCall() { - require( - SystemContractHelper.isSystemCall() || SystemContractHelper.isSystemContract(msg.sender), - "This method require system call flag" - ); + if (!SystemContractHelper.isSystemCall() && !SystemContractHelper.isSystemContract(msg.sender)) { + revert SystemCallFlagRequired(); + } _; } /// @notice Modifier that makes sure that the method /// can only be called from a system contract. 
modifier onlyCallFromSystemContract() { - require( - SystemContractHelper.isSystemContract(msg.sender), - "This method require the caller to be system contract" - ); + if (!SystemContractHelper.isSystemContract(msg.sender)) { + revert CallerMustBeSystemContract(); + } _; } /// @notice Modifier that makes sure that the method /// can only be called from a special given address. modifier onlyCallFrom(address caller) { - require(msg.sender == caller, "Inappropriate caller"); + if (msg.sender != caller) { + revert Unauthorized(msg.sender); + } _; } /// @notice Modifier that makes sure that the method /// can only be called from the bootloader. modifier onlyCallFromBootloader() { - require(msg.sender == BOOTLOADER_FORMAL_ADDRESS, "Callable only by the bootloader"); - _; - } - - /// @notice Modifier that makes sure that the method - /// can only be called from the L1 force deployer. - modifier onlyCallFromForceDeployer() { - require(msg.sender == FORCE_DEPLOYER); + if (msg.sender != BOOTLOADER_FORMAL_ADDRESS) { + revert CallerMustBeBootloader(); + } _; } } diff --git a/system-contracts/contracts/interfaces/IAccount.sol b/system-contracts/contracts/interfaces/IAccount.sol index c32b35767..cebe91d17 100644 --- a/system-contracts/contracts/interfaces/IAccount.sol +++ b/system-contracts/contracts/interfaces/IAccount.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; import {Transaction} from "../libraries/TransactionHelper.sol"; diff --git a/system-contracts/contracts/interfaces/IAccountCodeStorage.sol b/system-contracts/contracts/interfaces/IAccountCodeStorage.sol index c266774ea..5183e77f6 100644 --- a/system-contracts/contracts/interfaces/IAccountCodeStorage.sol +++ b/system-contracts/contracts/interfaces/IAccountCodeStorage.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; interface IAccountCodeStorage { function storeAccountConstructingCodeHash(address _address, bytes32 _hash) external; diff --git a/system-contracts/contracts/interfaces/IBaseToken.sol b/system-contracts/contracts/interfaces/IBaseToken.sol index d15f2f123..fc32c7b83 100644 --- a/system-contracts/contracts/interfaces/IBaseToken.sol +++ b/system-contracts/contracts/interfaces/IBaseToken.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; interface IBaseToken { function balanceOf(uint256) external view returns (uint256); diff --git a/system-contracts/contracts/interfaces/IBootloaderUtilities.sol b/system-contracts/contracts/interfaces/IBootloaderUtilities.sol index 31413320a..e900bfb5e 100644 --- a/system-contracts/contracts/interfaces/IBootloaderUtilities.sol +++ b/system-contracts/contracts/interfaces/IBootloaderUtilities.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; import {Transaction} from "../libraries/TransactionHelper.sol"; diff --git a/system-contracts/contracts/interfaces/IBridgehub.sol b/system-contracts/contracts/interfaces/IBridgehub.sol new file mode 100644 index 000000000..210fc287a --- /dev/null +++ b/system-contracts/contracts/interfaces/IBridgehub.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; + +/// @author Matter Labs +/// @custom:security-contact security@matterlabs.dev +interface IBridgehub { + function setAddresses(address _assetRouter, address _ctmDeployer, address _messageRoot) external; + + function owner() external view returns (address); +} diff --git a/system-contracts/contracts/interfaces/IComplexUpgrader.sol b/system-contracts/contracts/interfaces/IComplexUpgrader.sol index 1b5e15182..3b1468417 100644 --- a/system-contracts/contracts/interfaces/IComplexUpgrader.sol +++ b/system-contracts/contracts/interfaces/IComplexUpgrader.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /** * @author Matter Labs diff --git a/system-contracts/contracts/interfaces/ICompressor.sol b/system-contracts/contracts/interfaces/ICompressor.sol index 3062ea4f7..854aa7904 100644 --- a/system-contracts/contracts/interfaces/ICompressor.sol +++ b/system-contracts/contracts/interfaces/ICompressor.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; // The bitmask by applying which to the compressed state diff metadata we retrieve its operation. uint8 constant OPERATION_BITMASK = 7; diff --git a/system-contracts/contracts/interfaces/IContractDeployer.sol b/system-contracts/contracts/interfaces/IContractDeployer.sol index 3f84672d7..f72aa19d4 100644 --- a/system-contracts/contracts/interfaces/IContractDeployer.sol +++ b/system-contracts/contracts/interfaces/IContractDeployer.sol @@ -1,6 +1,20 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; + +/// @notice A struct that describes a forced deployment on an address +struct ForceDeployment { + // The bytecode hash to put on an address + bytes32 bytecodeHash; + // The address on which to deploy the bytecodehash to + address newAddress; + // Whether to run the constructor on the force deployment + bool callConstructor; + // The value with which to initialize a contract + uint256 value; + // The constructor calldata + bytes input; +} interface IContractDeployer { /// @notice Defines the version of the account abstraction protocol @@ -88,4 +102,7 @@ interface IContractDeployer { /// @notice Can be called by an account to update its nonce ordering function updateNonceOrdering(AccountNonceOrdering _nonceOrdering) external; + + /// @notice This method is to be used only during an upgrade to set bytecodes on specific addresses. 
+ function forceDeployOnAddresses(ForceDeployment[] calldata _deployments) external payable; } diff --git a/system-contracts/contracts/interfaces/IImmutableSimulator.sol b/system-contracts/contracts/interfaces/IImmutableSimulator.sol index d30ac9b96..840053849 100644 --- a/system-contracts/contracts/interfaces/IImmutableSimulator.sol +++ b/system-contracts/contracts/interfaces/IImmutableSimulator.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; struct ImmutableData { uint256 index; diff --git a/system-contracts/contracts/interfaces/IKnownCodesStorage.sol b/system-contracts/contracts/interfaces/IKnownCodesStorage.sol index 98a1277d0..551cfb0d8 100644 --- a/system-contracts/contracts/interfaces/IKnownCodesStorage.sol +++ b/system-contracts/contracts/interfaces/IKnownCodesStorage.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /** * @author Matter Labs diff --git a/system-contracts/contracts/interfaces/IL1Messenger.sol b/system-contracts/contracts/interfaces/IL1Messenger.sol index cd0cc90f7..88e2c81d8 100644 --- a/system-contracts/contracts/interfaces/IL1Messenger.sol +++ b/system-contracts/contracts/interfaces/IL1Messenger.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /// @dev The log passed from L2 /// @param l2ShardId The shard identifier, 0 - rollup, 1 - porter. 
All other values are not used but are reserved for the future diff --git a/system-contracts/contracts/interfaces/IL2DAValidator.sol b/system-contracts/contracts/interfaces/IL2DAValidator.sol new file mode 100644 index 000000000..02e5bf953 --- /dev/null +++ b/system-contracts/contracts/interfaces/IL2DAValidator.sol @@ -0,0 +1,18 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; + +interface IL2DAValidator { + function validatePubdata( + // The rolling hash of the user L2->L1 logs. + bytes32 _chainedLogsHash, + // The root hash of the user L2->L1 logs. + bytes32 _logsRootHash, + // The chained hash of the L2->L1 messages + bytes32 _chainedMessagesHash, + // The chained hash of uncompressed bytecodes sent to L1 + bytes32 _chainedBytecodesHash, + // Same operator input + bytes calldata _totalL2ToL1PubdataAndStateDiffs + ) external returns (bytes32 outputHash); +} diff --git a/system-contracts/contracts/interfaces/IL2GenesisUpgrade.sol b/system-contracts/contracts/interfaces/IL2GenesisUpgrade.sol new file mode 100644 index 000000000..8752202a5 --- /dev/null +++ b/system-contracts/contracts/interfaces/IL2GenesisUpgrade.sol @@ -0,0 +1,38 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; + +struct ZKChainSpecificForceDeploymentsData { + bytes32 baseTokenAssetId; + address l2LegacySharedBridge; + address l2Weth; +} + +// solhint-disable-next-line gas-struct-packing +struct FixedForceDeploymentsData { + uint256 l1ChainId; + uint256 eraChainId; + address l1AssetRouter; + bytes32 l2TokenProxyBytecodeHash; + address aliasedL1Governance; + uint256 maxNumberOfZKChains; + bytes32 bridgehubBytecodeHash; + bytes32 l2AssetRouterBytecodeHash; + bytes32 l2NtvBytecodeHash; + bytes32 messageRootBytecodeHash; + address l2SharedBridgeLegacyImpl; + address l2BridgedStandardERC20Impl; + address l2BridgeProxyOwnerAddress; + address l2BridgedStandardERC20ProxyOwnerAddress; +} + +interface IL2GenesisUpgrade { + event UpgradeComplete(uint256 _chainId); + + function genesisUpgrade( + uint256 _chainId, + address _ctmDeployer, + bytes calldata _fixedForceDeploymentsData, + bytes calldata _additionalForceDeploymentsData + ) external payable; +} diff --git a/l2-contracts/contracts/bridge/interfaces/IL2SharedBridge.sol b/system-contracts/contracts/interfaces/IL2SharedBridgeLegacy.sol similarity index 58% rename from l2-contracts/contracts/bridge/interfaces/IL2SharedBridge.sol rename to system-contracts/contracts/interfaces/IL2SharedBridgeLegacy.sol index c1aa05102..05d86757e 100644 --- a/l2-contracts/contracts/bridge/interfaces/IL2SharedBridge.sol +++ b/system-contracts/contracts/interfaces/IL2SharedBridgeLegacy.sol @@ -1,9 +1,12 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity ^0.8.20; + +import {UpgradeableBeacon} from "@openzeppelin/contracts-v4/proxy/beacon/UpgradeableBeacon.sol"; /// @author Matter Labs -interface IL2SharedBridge { +/// @custom:security-contact security@matterlabs.dev +interface IL2SharedBridgeLegacy { event FinalizeDeposit( address indexed l1Sender, address indexed l2Receiver, @@ -11,20 +14,7 @@ interface IL2SharedBridge { uint256 amount ); - event WithdrawalInitiated( - address indexed l2Sender, - 
address indexed l1Receiver, - address indexed l2Token, - uint256 amount - ); - - function finalizeDeposit( - address _l1Sender, - address _l2Receiver, - address _l1Token, - uint256 _amount, - bytes calldata _data - ) external; + function l2TokenBeacon() external returns (UpgradeableBeacon); function withdraw(address _l1Receiver, address _l2Token, uint256 _amount) external; @@ -35,4 +25,8 @@ interface IL2SharedBridge { function l1Bridge() external view returns (address); function l1SharedBridge() external view returns (address); + + function deployBeaconProxy(bytes32 _salt) external returns (address); + + function sendMessageToL1(bytes calldata _message) external; } diff --git a/system-contracts/contracts/interfaces/IL2StandardToken.sol b/system-contracts/contracts/interfaces/IL2StandardToken.sol index 3d75c8ede..d67a3ea1f 100644 --- a/system-contracts/contracts/interfaces/IL2StandardToken.sol +++ b/system-contracts/contracts/interfaces/IL2StandardToken.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; interface IL2StandardToken { event BridgeMint(address indexed _account, uint256 _amount); diff --git a/system-contracts/contracts/interfaces/IMailbox.sol b/system-contracts/contracts/interfaces/IMailbox.sol index ba673058c..a9dcdad05 100644 --- a/system-contracts/contracts/interfaces/IMailbox.sol +++ b/system-contracts/contracts/interfaces/IMailbox.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; interface IMailbox { function finalizeEthWithdrawal( diff --git a/system-contracts/contracts/interfaces/IMessageRoot.sol b/system-contracts/contracts/interfaces/IMessageRoot.sol new file mode 100644 index 000000000..854508eb1 --- /dev/null +++ b/system-contracts/contracts/interfaces/IMessageRoot.sol @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: MIT +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; + +interface IMessageRoot { + function getAggregatedRoot() external view returns (bytes32 aggregatedRoot); +} diff --git a/system-contracts/contracts/interfaces/INonceHolder.sol b/system-contracts/contracts/interfaces/INonceHolder.sol index 1213fbea4..ce3b0279d 100644 --- a/system-contracts/contracts/interfaces/INonceHolder.sol +++ b/system-contracts/contracts/interfaces/INonceHolder.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /** * @author Matter Labs diff --git a/system-contracts/contracts/interfaces/IPaymaster.sol b/system-contracts/contracts/interfaces/IPaymaster.sol index 7b06d86ee..1c8af5b28 100644 --- a/system-contracts/contracts/interfaces/IPaymaster.sol +++ b/system-contracts/contracts/interfaces/IPaymaster.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; import {Transaction} from "../libraries/TransactionHelper.sol"; diff --git a/system-contracts/contracts/interfaces/IPaymasterFlow.sol b/system-contracts/contracts/interfaces/IPaymasterFlow.sol index 38866073e..4c9683fd4 100644 --- a/system-contracts/contracts/interfaces/IPaymasterFlow.sol +++ b/system-contracts/contracts/interfaces/IPaymasterFlow.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /** * @author Matter Labs diff --git a/system-contracts/contracts/interfaces/IPubdataChunkPublisher.sol b/system-contracts/contracts/interfaces/IPubdataChunkPublisher.sol index 83c1893fd..b422bb359 100644 --- a/system-contracts/contracts/interfaces/IPubdataChunkPublisher.sol +++ b/system-contracts/contracts/interfaces/IPubdataChunkPublisher.sol @@ -1,5 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /** * @author Matter Labs @@ -9,5 +10,6 @@ pragma solidity 0.8.20; interface IPubdataChunkPublisher { /// @notice Chunks pubdata into pieces that can fit into blobs. /// @param _pubdata The total l2 to l1 pubdata that will be sent via L1 blobs. - function chunkAndPublishPubdata(bytes calldata _pubdata) external; + /// @dev Note: This is an early implementation, in the future we plan to support up to 16 blobs per l1 batch. 
+ function chunkPubdataToBlobs(bytes calldata _pubdata) external pure returns (bytes32[] memory blobLinearHashes); } diff --git a/system-contracts/contracts/interfaces/ISystemContext.sol b/system-contracts/contracts/interfaces/ISystemContext.sol index a122a04f5..ff083fd0b 100644 --- a/system-contracts/contracts/interfaces/ISystemContext.sol +++ b/system-contracts/contracts/interfaces/ISystemContext.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /** * @author Matter Labs @@ -58,4 +58,6 @@ interface ISystemContext { function gasPerPubdataByte() external view returns (uint256 gasPerPubdataByte); function getCurrentPubdataSpent() external view returns (uint256 currentPubdataSpent); + + function setChainId(uint256 _newChainId) external; } diff --git a/system-contracts/contracts/interfaces/ISystemContextDeprecated.sol b/system-contracts/contracts/interfaces/ISystemContextDeprecated.sol index a44b61b23..ac5153270 100644 --- a/system-contracts/contracts/interfaces/ISystemContextDeprecated.sol +++ b/system-contracts/contracts/interfaces/ISystemContextDeprecated.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; /** * @author Matter Labs diff --git a/system-contracts/contracts/libraries/EfficientCall.sol b/system-contracts/contracts/libraries/EfficientCall.sol index 8f9939f08..27fea6396 100644 --- a/system-contracts/contracts/libraries/EfficientCall.sol +++ b/system-contracts/contracts/libraries/EfficientCall.sol @@ -1,11 +1,12 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; import {SystemContractHelper, ADDRESS_MASK} from "./SystemContractHelper.sol"; import {SystemContractsCaller, CalldataForwardingMode, RAW_FAR_CALL_BY_REF_CALL_ADDRESS, SYSTEM_CALL_BY_REF_CALL_ADDRESS, MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, MIMIC_CALL_BY_REF_CALL_ADDRESS} from "./SystemContractsCaller.sol"; import {Utils} from "./Utils.sol"; import {SHA256_SYSTEM_CONTRACT, KECCAK256_SYSTEM_CONTRACT, MSG_VALUE_SYSTEM_CONTRACT} from "../Constants.sol"; +import {Keccak256InvalidReturnData, ShaInvalidReturnData} from "../SystemContractErrors.sol"; /** * @author Matter Labs @@ -36,7 +37,9 @@ library EfficientCall { /// @return The `keccak256` hash. function keccak(bytes calldata _data) internal view returns (bytes32) { bytes memory returnData = staticCall(gasleft(), KECCAK256_SYSTEM_CONTRACT, _data); - require(returnData.length == 32, "keccak256 returned invalid data"); + if (returnData.length != 32) { + revert Keccak256InvalidReturnData(); + } return bytes32(returnData); } @@ -45,7 +48,9 @@ library EfficientCall { /// @return The `sha256` hash. 
function sha(bytes calldata _data) internal view returns (bytes32) { bytes memory returnData = staticCall(gasleft(), SHA256_SYSTEM_CONTRACT, _data); - require(returnData.length == 32, "sha returned invalid data"); + if (returnData.length != 32) { + revert ShaInvalidReturnData(); + } return bytes32(returnData); } diff --git a/system-contracts/contracts/libraries/RLPEncoder.sol b/system-contracts/contracts/libraries/RLPEncoder.sol index 8e32ea9ba..16eaa4053 100644 --- a/system-contracts/contracts/libraries/RLPEncoder.sol +++ b/system-contracts/contracts/libraries/RLPEncoder.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; /** * @author Matter Labs @@ -100,7 +100,7 @@ library RLPEncoder { hbs += 2; } if (_number > type(uint8).max) { - hbs += 1; + ++hbs; } } } diff --git a/system-contracts/contracts/libraries/SystemContractHelper.sol b/system-contracts/contracts/libraries/SystemContractHelper.sol index 7ae75b520..d3d5e7536 100644 --- a/system-contracts/contracts/libraries/SystemContractHelper.sol +++ b/system-contracts/contracts/libraries/SystemContractHelper.sol @@ -1,10 +1,11 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; import {MAX_SYSTEM_CONTRACT_ADDRESS} from "../Constants.sol"; -import {CALLFLAGS_CALL_ADDRESS, CODE_ADDRESS_CALL_ADDRESS, EVENT_WRITE_ADDRESS, EVENT_INITIALIZE_ADDRESS, GET_EXTRA_ABI_DATA_ADDRESS, LOAD_CALLDATA_INTO_ACTIVE_PTR_CALL_ADDRESS, META_CODE_SHARD_ID_OFFSET, META_CALLER_SHARD_ID_OFFSET, META_SHARD_ID_OFFSET, META_AUX_HEAP_SIZE_OFFSET, META_HEAP_SIZE_OFFSET, META_PUBDATA_PUBLISHED_OFFSET, META_CALL_ADDRESS, PTR_CALLDATA_CALL_ADDRESS, PTR_ADD_INTO_ACTIVE_CALL_ADDRESS, PTR_SHRINK_INTO_ACTIVE_CALL_ADDRESS, PTR_PACK_INTO_ACTIVE_CALL_ADDRESS, PRECOMPILE_CALL_ADDRESS, SET_CONTEXT_VALUE_CALL_ADDRESS, TO_L1_CALL_ADDRESS} from "./SystemContractsCaller.sol"; +import {CalldataForwardingMode, SystemContractsCaller, MIMIC_CALL_CALL_ADDRESS, CALLFLAGS_CALL_ADDRESS, CODE_ADDRESS_CALL_ADDRESS, EVENT_WRITE_ADDRESS, EVENT_INITIALIZE_ADDRESS, GET_EXTRA_ABI_DATA_ADDRESS, LOAD_CALLDATA_INTO_ACTIVE_PTR_CALL_ADDRESS, META_CODE_SHARD_ID_OFFSET, META_CALLER_SHARD_ID_OFFSET, META_SHARD_ID_OFFSET, META_AUX_HEAP_SIZE_OFFSET, META_HEAP_SIZE_OFFSET, META_PUBDATA_PUBLISHED_OFFSET, META_CALL_ADDRESS, PTR_CALLDATA_CALL_ADDRESS, PTR_ADD_INTO_ACTIVE_CALL_ADDRESS, PTR_SHRINK_INTO_ACTIVE_CALL_ADDRESS, PTR_PACK_INTO_ACTIVE_CALL_ADDRESS, PRECOMPILE_CALL_ADDRESS, SET_CONTEXT_VALUE_CALL_ADDRESS, TO_L1_CALL_ADDRESS} from "./SystemContractsCaller.sol"; +import {IndexOutOfBounds, FailedToChargeGas} from "../SystemContractErrors.sol"; uint256 constant UINT32_MASK = type(uint32).max; uint256 constant UINT64_MASK = type(uint64).max; @@ -318,7 +319,10 @@ library SystemContractHelper { /// @dev It is equal to the value of the (N+2)-th register /// at the start of the call. 
function getExtraAbiData(uint256 index) internal view returns (uint256 extraAbiData) { - require(index < 10, "There are only 10 accessible registers"); + // Note that there are only 10 accessible registers (indices 0-9 inclusively) + if (index > 9) { + revert IndexOutOfBounds(); + } address callAddr = GET_EXTRA_ABI_DATA_ADDRESS; assembly { @@ -350,6 +354,66 @@ library SystemContractHelper { _gasToPay, _pubdataToSpend ); - require(precompileCallSuccess, "Failed to charge gas"); + if (!precompileCallSuccess) { + revert FailedToChargeGas(); + } + } + + /// @notice Performs a `mimicCall` to an address. + /// @param _to The address to call. + /// @param _whoToMimic The address to mimic. + /// @param _data The data to pass to the call. + /// @return success Whether the call was successful. + /// @return returndata The return data of the call. + function mimicCall( + address _to, + address _whoToMimic, + bytes memory _data + ) internal returns (bool success, bytes memory returndata) { + // In zkSync, no memory-related values can exceed uint32, so it is safe to convert here + uint32 dataStart; + uint32 dataLength = uint32(_data.length); + assembly { + dataStart := add(_data, 0x20) + } + + uint256 farCallAbi = SystemContractsCaller.getFarCallABI({ + dataOffset: 0, + memoryPage: 0, + dataStart: dataStart, + dataLength: dataLength, + gasPassed: uint32(gasleft()), + shardId: 0, + forwardingMode: CalldataForwardingMode.UseHeap, + isConstructorCall: false, + isSystemCall: false + }); + + address callAddr = MIMIC_CALL_CALL_ADDRESS; + uint256 rtSize; + assembly { + success := call(_to, callAddr, 0, farCallAbi, _whoToMimic, 0, 0) + rtSize := returndatasize() + } + + returndata = new bytes(rtSize); + assembly { + returndatacopy(add(returndata, 0x20), 0, rtSize) + } + } + + /// @notice Performs a `mimicCall` to an address, while ensuring that the call + /// was successful + /// @param _to The address to call. + /// @param _whoToMimic The address to mimic. 
+ /// @param _data The data to pass to the call. + function mimicCallWithPropagatedRevert(address _to, address _whoToMimic, bytes memory _data) internal { + (bool success, bytes memory returnData) = mimicCall(_to, _whoToMimic, _data); + if (!success) { + // Propagate revert reason + assembly { + revert(add(returnData, 0x20), returndatasize()) + } + } } } diff --git a/system-contracts/contracts/libraries/SystemContractsCaller.sol b/system-contracts/contracts/libraries/SystemContractsCaller.sol index d964fbbe7..9497b0c52 100644 --- a/system-contracts/contracts/libraries/SystemContractsCaller.sol +++ b/system-contracts/contracts/libraries/SystemContractsCaller.sol @@ -1,12 +1,12 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; import {MSG_VALUE_SYSTEM_CONTRACT, MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT} from "../Constants.sol"; import {Utils} from "./Utils.sol"; // Addresses used for the compiler to be replaced with the -// zkSync-specific opcodes during the compilation. +// ZKsync-specific opcodes during the compilation. // IMPORTANT: these are just compile-time constants and are used // only if used in-place by Yul optimizer. 
address constant TO_L1_CALL_ADDRESS = address((1 << 16) - 1); @@ -80,7 +80,7 @@ library SystemContractsCaller { assembly { dataStart := add(data, 0x20) } - uint32 dataLength = uint32(Utils.safeCastToU32(data.length)); + uint32 dataLength = Utils.safeCastToU32(data.length); uint256 farCallAbi = SystemContractsCaller.getFarCallABI({ dataOffset: 0, diff --git a/system-contracts/contracts/libraries/TransactionHelper.sol b/system-contracts/contracts/libraries/TransactionHelper.sol index 9a2921010..467eb57f9 100644 --- a/system-contracts/contracts/libraries/TransactionHelper.sol +++ b/system-contracts/contracts/libraries/TransactionHelper.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; import {IERC20} from "../openzeppelin/token/ERC20/IERC20.sol"; import {SafeERC20} from "../openzeppelin/token/ERC20/utils/SafeERC20.sol"; @@ -9,8 +9,9 @@ import {IPaymasterFlow} from "../interfaces/IPaymasterFlow.sol"; import {BASE_TOKEN_SYSTEM_CONTRACT, BOOTLOADER_FORMAL_ADDRESS} from "../Constants.sol"; import {RLPEncoder} from "./RLPEncoder.sol"; import {EfficientCall} from "./EfficientCall.sol"; +import {UnsupportedTxType, InvalidInput, UnsupportedPaymasterFlow} from "../SystemContractErrors.sol"; -/// @dev The type id of zkSync's EIP-712-signed transaction. +/// @dev The type id of ZKsync's EIP-712-signed transaction. uint8 constant EIP_712_TX_TYPE = 0x71; /// @dev The type id of legacy transactions. @@ -20,7 +21,7 @@ uint8 constant EIP_2930_TX_TYPE = 0x01; /// @dev The type id of EIP1559 transactions. uint8 constant EIP_1559_TX_TYPE = 0x02; -/// @notice Structure used to represent a zkSync transaction. +/// @notice Structure used to represent a ZKsync transaction. struct Transaction { // The type of the transaction. 
uint256 txType; @@ -78,9 +79,10 @@ library TransactionHelper { using SafeERC20 for IERC20; /// @notice The EIP-712 typehash for the contract's domain - bytes32 constant EIP712_DOMAIN_TYPEHASH = keccak256("EIP712Domain(string name,string version,uint256 chainId)"); + bytes32 internal constant EIP712_DOMAIN_TYPEHASH = + keccak256("EIP712Domain(string name,string version,uint256 chainId)"); - bytes32 constant EIP712_TRANSACTION_TYPE_HASH = + bytes32 internal constant EIP712_TRANSACTION_TYPE_HASH = keccak256( "Transaction(uint256 txType,uint256 from,uint256 to,uint256 gasLimit,uint256 gasPerPubdataByteLimit,uint256 maxFeePerGas,uint256 maxPriorityFeePerGas,uint256 paymaster,uint256 nonce,uint256 value,bytes data,bytes32[] factoryDeps,bytes paymasterInput)" ); @@ -108,11 +110,11 @@ library TransactionHelper { } else { // Currently no other transaction types are supported. // Any new transaction types will be processed in a similar manner. - revert("Encoding unsupported tx"); + revert UnsupportedTxType(_transaction.txType); } } - /// @notice Encode hash of the zkSync native transaction type. + /// @notice Encode hash of the ZKsync native transaction type. /// @return keccak256 hash of the EIP-712 encoded representation of transaction function _encodeHashEIP712Transaction(Transaction calldata _transaction) private view returns (bytes32) { bytes32 structHash = keccak256( @@ -221,7 +223,7 @@ library TransactionHelper { // Hash of EIP2930 transactions is encoded the following way: // H(0x01 || RLP(chain_id, nonce, gas_price, gas_limit, destination, amount, data, access_list)) // - // Note, that on zkSync access lists are not supported and should always be empty. + // Note, that on ZKsync access lists are not supported and should always be empty. // Encode all fixed-length params to avoid "stack too deep error" bytes memory encodedFixedLengthParams; @@ -259,7 +261,7 @@ library TransactionHelper { // Otherwise the length is not encoded at all. 
} - // On zkSync, access lists are always zero length (at least for now). + // On ZKsync, access lists are always zero length (at least for now). bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0); bytes memory encodedListLength; @@ -293,7 +295,7 @@ library TransactionHelper { // Hash of EIP1559 transactions is encoded the following way: // H(0x02 || RLP(chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, destination, amount, data, access_list)) // - // Note, that on zkSync access lists are not supported and should always be empty. + // Note, that on ZKsync access lists are not supported and should always be empty. // Encode all fixed-length params to avoid "stack too deep error" bytes memory encodedFixedLengthParams; @@ -333,7 +335,7 @@ library TransactionHelper { // Otherwise the length is not encoded at all. } - // On zkSync, access lists are always zero length (at least for now). + // On ZKsync, access lists are always zero length (at least for now). bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0); bytes memory encodedListLength; @@ -365,14 +367,15 @@ library TransactionHelper { /// for tokens, etc. For more information on the expected behavior, check out /// the "Paymaster flows" section in the documentation. 
function processPaymasterInput(Transaction calldata _transaction) internal { - require(_transaction.paymasterInput.length >= 4, "The standard paymaster input must be at least 4 bytes long"); + if (_transaction.paymasterInput.length < 4) { + revert InvalidInput(); + } bytes4 paymasterInputSelector = bytes4(_transaction.paymasterInput[0:4]); if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) { - require( - _transaction.paymasterInput.length >= 68, - "The approvalBased paymaster input must be at least 68 bytes long" - ); + if (_transaction.paymasterInput.length < 68) { + revert InvalidInput(); + } // While the actual data consists of address, uint256 and bytes data, // the data is needed only for the paymaster, so we ignore it here for the sake of optimization @@ -390,7 +393,7 @@ library TransactionHelper { } else if (paymasterInputSelector == IPaymasterFlow.general.selector) { // Do nothing. general(bytes) paymaster flow means that the paymaster must interpret these bytes on his own. } else { - revert("Unsupported paymaster flow"); + revert UnsupportedPaymasterFlow(); } } diff --git a/system-contracts/contracts/libraries/UnsafeBytesCalldata.sol b/system-contracts/contracts/libraries/UnsafeBytesCalldata.sol index 4ce65f5fb..82b4c5c1d 100644 --- a/system-contracts/contracts/libraries/UnsafeBytesCalldata.sol +++ b/system-contracts/contracts/libraries/UnsafeBytesCalldata.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT - -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
+pragma solidity ^0.8.20; /** * @author Matter Labs diff --git a/system-contracts/contracts/libraries/Utils.sol b/system-contracts/contracts/libraries/Utils.sol index 5fa7eec6f..fc23de94b 100644 --- a/system-contracts/contracts/libraries/Utils.sol +++ b/system-contracts/contracts/libraries/Utils.sol @@ -1,36 +1,44 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. +pragma solidity ^0.8.20; import {EfficientCall} from "./EfficientCall.sol"; +import {MalformedBytecode, BytecodeError, Overflow} from "../SystemContractErrors.sol"; /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev - * @dev Common utilities used in zkSync system contracts + * @dev Common utilities used in ZKsync system contracts */ library Utils { /// @dev Bit mask of bytecode hash "isConstructor" marker - bytes32 constant IS_CONSTRUCTOR_BYTECODE_HASH_BIT_MASK = + bytes32 internal constant IS_CONSTRUCTOR_BYTECODE_HASH_BIT_MASK = 0x00ff000000000000000000000000000000000000000000000000000000000000; /// @dev Bit mask to set the "isConstructor" marker in the bytecode hash - bytes32 constant SET_IS_CONSTRUCTOR_MARKER_BIT_MASK = + bytes32 internal constant SET_IS_CONSTRUCTOR_MARKER_BIT_MASK = 0x0001000000000000000000000000000000000000000000000000000000000000; function safeCastToU128(uint256 _x) internal pure returns (uint128) { - require(_x <= type(uint128).max, "Overflow"); + if (_x > type(uint128).max) { + revert Overflow(); + } return uint128(_x); } function safeCastToU32(uint256 _x) internal pure returns (uint32) { - require(_x <= type(uint32).max, "Overflow"); + if (_x > type(uint32).max) { + revert Overflow(); + } return uint32(_x); } function safeCastToU24(uint256 _x) internal pure returns (uint24) { - require(_x <= type(uint24).max, "Overflow"); + if (_x > type(uint24).max) { + revert Overflow(); + } return 
uint24(_x); } @@ -81,11 +89,19 @@ library Utils { /// - Bytecode words length is not odd function hashL2Bytecode(bytes calldata _bytecode) internal view returns (bytes32 hashedBytecode) { // Note that the length of the bytecode must be provided in 32-byte words. - require(_bytecode.length % 32 == 0, "po"); + if (_bytecode.length % 32 != 0) { + revert MalformedBytecode(BytecodeError.Length); + } uint256 lengthInWords = _bytecode.length / 32; - require(lengthInWords < 2 ** 16, "pp"); // bytecode length must be less than 2^16 words - require(lengthInWords % 2 == 1, "pr"); // bytecode length in words must be odd + // bytecode length must be less than 2^16 words + if (lengthInWords >= 2 ** 16) { + revert MalformedBytecode(BytecodeError.NumberOfWords); + } + // bytecode length in words must be odd + if (lengthInWords % 2 == 0) { + revert MalformedBytecode(BytecodeError.WordsMustBeOdd); + } hashedBytecode = EfficientCall.sha(_bytecode) & 0x00000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF; diff --git a/system-contracts/contracts/openzeppelin/token/ERC20/IERC20.sol b/system-contracts/contracts/openzeppelin/token/ERC20/IERC20.sol index b816bfed0..18b39a7a9 100644 --- a/system-contracts/contracts/openzeppelin/token/ERC20/IERC20.sol +++ b/system-contracts/contracts/openzeppelin/token/ERC20/IERC20.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT // OpenZeppelin Contracts (last updated v4.6.0) (token/ERC20/IERC20.sol) - +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
pragma solidity ^0.8.0; /** diff --git a/system-contracts/contracts/openzeppelin/token/ERC20/extensions/IERC20Permit.sol b/system-contracts/contracts/openzeppelin/token/ERC20/extensions/IERC20Permit.sol index bb43e53b6..5e0875438 100644 --- a/system-contracts/contracts/openzeppelin/token/ERC20/extensions/IERC20Permit.sol +++ b/system-contracts/contracts/openzeppelin/token/ERC20/extensions/IERC20Permit.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT // OpenZeppelin Contracts v4.4.1 (token/ERC20/extensions/IERC20Permit.sol) - +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. pragma solidity ^0.8.0; /** diff --git a/system-contracts/contracts/openzeppelin/token/ERC20/utils/SafeERC20.sol b/system-contracts/contracts/openzeppelin/token/ERC20/utils/SafeERC20.sol index 2ae0c4b0e..a23e6d1f7 100644 --- a/system-contracts/contracts/openzeppelin/token/ERC20/utils/SafeERC20.sol +++ b/system-contracts/contracts/openzeppelin/token/ERC20/utils/SafeERC20.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT // OpenZeppelin Contracts (last updated v4.8.0) (token/ERC20/utils/SafeERC20.sol) - +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. pragma solidity ^0.8.0; import {IERC20} from "../IERC20.sol"; diff --git a/system-contracts/contracts/openzeppelin/utils/Address.sol b/system-contracts/contracts/openzeppelin/utils/Address.sol index 7a7d2d5d3..5d6de78c4 100644 --- a/system-contracts/contracts/openzeppelin/utils/Address.sol +++ b/system-contracts/contracts/openzeppelin/utils/Address.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT // OpenZeppelin Contracts (last updated v4.8.0) (utils/Address.sol) - +// We use a floating point pragma here so it can be used within other projects that interact with the ZKsync ecosystem without using our exact pragma version. 
pragma solidity ^0.8.1; /** diff --git a/system-contracts/contracts/precompiles/CodeOracle.yul b/system-contracts/contracts/precompiles/CodeOracle.yul index 820b8df70..63b386788 100644 --- a/system-contracts/contracts/precompiles/CodeOracle.yul +++ b/system-contracts/contracts/precompiles/CodeOracle.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev diff --git a/system-contracts/contracts/precompiles/EcAdd.yul b/system-contracts/contracts/precompiles/EcAdd.yul index 5771df8f9..8b7f25618 100644 --- a/system-contracts/contracts/precompiles/EcAdd.yul +++ b/system-contracts/contracts/precompiles/EcAdd.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + object "EcAdd" { code { return(0, 0) diff --git a/system-contracts/contracts/precompiles/EcMul.yul b/system-contracts/contracts/precompiles/EcMul.yul index 84838ec2a..63fd0bc42 100644 --- a/system-contracts/contracts/precompiles/EcMul.yul +++ b/system-contracts/contracts/precompiles/EcMul.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + object "EcMul" { code { return(0, 0) diff --git a/system-contracts/contracts/precompiles/EcPairing.yul b/system-contracts/contracts/precompiles/EcPairing.yul index 6ea6e92de..5e8011bcc 100644 --- a/system-contracts/contracts/precompiles/EcPairing.yul +++ b/system-contracts/contracts/precompiles/EcPairing.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + object "EcPairing" { code { return(0, 0) diff --git a/system-contracts/contracts/precompiles/Ecrecover.yul b/system-contracts/contracts/precompiles/Ecrecover.yul index cbb8fcc0f..9c64d509f 100644 --- a/system-contracts/contracts/precompiles/Ecrecover.yul +++ b/system-contracts/contracts/precompiles/Ecrecover.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev diff --git a/system-contracts/contracts/precompiles/Keccak256.yul 
b/system-contracts/contracts/precompiles/Keccak256.yul index 8eaa53671..397ee89bb 100644 --- a/system-contracts/contracts/precompiles/Keccak256.yul +++ b/system-contracts/contracts/precompiles/Keccak256.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev diff --git a/system-contracts/contracts/precompiles/P256Verify.yul b/system-contracts/contracts/precompiles/P256Verify.yul index 8cd14beb2..80b782209 100644 --- a/system-contracts/contracts/precompiles/P256Verify.yul +++ b/system-contracts/contracts/precompiles/P256Verify.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev diff --git a/system-contracts/contracts/precompiles/SHA256.yul b/system-contracts/contracts/precompiles/SHA256.yul index ff52632cd..8173502ef 100644 --- a/system-contracts/contracts/precompiles/SHA256.yul +++ b/system-contracts/contracts/precompiles/SHA256.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + /** * @author Matter Labs * @custom:security-contact security@matterlabs.dev diff --git a/system-contracts/contracts/precompiles/test-contracts/Keccak256Mock.yul b/system-contracts/contracts/precompiles/test-contracts/Keccak256Mock.yul index b37eb69ca..e3cb9ac1e 100644 --- a/system-contracts/contracts/precompiles/test-contracts/Keccak256Mock.yul +++ b/system-contracts/contracts/precompiles/test-contracts/Keccak256Mock.yul @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: MIT + /** * @author Matter Labs * @notice The contract used to emulate EVM's keccak256 opcode. 
diff --git a/system-contracts/contracts/test-contracts/AlwaysRevert.sol b/system-contracts/contracts/test-contracts/AlwaysRevert.sol index 902117487..3c9d469ce 100644 --- a/system-contracts/contracts/test-contracts/AlwaysRevert.sol +++ b/system-contracts/contracts/test-contracts/AlwaysRevert.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; +pragma solidity ^0.8.20; contract AlwaysRevert { fallback() external { diff --git a/system-contracts/contracts/test-contracts/CodeOracleTest.sol b/system-contracts/contracts/test-contracts/CodeOracleTest.sol index 4db306fb6..31de9d366 100644 --- a/system-contracts/contracts/test-contracts/CodeOracleTest.sol +++ b/system-contracts/contracts/test-contracts/CodeOracleTest.sol @@ -1,6 +1,6 @@ -// SPDX-License-Identifier: UNLICENSED +// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; +pragma solidity ^0.8.20; address constant REAL_CODE_ORACLE_ADDR = 0x0000000000000000000000000000000000008011; diff --git a/system-contracts/contracts/test-contracts/DelegateCaller.sol b/system-contracts/contracts/test-contracts/DelegateCaller.sol index caa5aae6b..a28cc0167 100644 --- a/system-contracts/contracts/test-contracts/DelegateCaller.sol +++ b/system-contracts/contracts/test-contracts/DelegateCaller.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; +pragma solidity ^0.8.20; contract DelegateCaller { function delegateCall(address _to) external payable { diff --git a/system-contracts/contracts/test-contracts/Deployable.sol b/system-contracts/contracts/test-contracts/Deployable.sol index be35861a4..8178eadb4 100644 --- a/system-contracts/contracts/test-contracts/Deployable.sol +++ b/system-contracts/contracts/test-contracts/Deployable.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; contract Deployable { event Deployed(uint256 value, bytes data); diff --git a/system-contracts/contracts/test-contracts/DummyBridgehub.sol 
b/system-contracts/contracts/test-contracts/DummyBridgehub.sol new file mode 100644 index 000000000..4beadb4ce --- /dev/null +++ b/system-contracts/contracts/test-contracts/DummyBridgehub.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +contract DummyBridgehub { + address public owner; + + constructor(uint256 _l1ChainId, address _aliasedL1Governance, uint256 _maxNumberOfZKChains) { + owner = _aliasedL1Governance; + } +} diff --git a/system-contracts/contracts/test-contracts/DummyL2AssetRouter.sol b/system-contracts/contracts/test-contracts/DummyL2AssetRouter.sol new file mode 100644 index 000000000..65796aa3f --- /dev/null +++ b/system-contracts/contracts/test-contracts/DummyL2AssetRouter.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +contract DummyL2AssetRouter { + constructor( + uint256 _l1ChainId, + address _l1AssetRouter, + address _aliasedL1Governance, + bytes32 _baseTokenAssetId, + uint256 _maxNumberOfZKChains + ) {} +} diff --git a/system-contracts/contracts/test-contracts/DummyL2NativeTokenVault.sol b/system-contracts/contracts/test-contracts/DummyL2NativeTokenVault.sol new file mode 100644 index 000000000..1832237d2 --- /dev/null +++ b/system-contracts/contracts/test-contracts/DummyL2NativeTokenVault.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; + +contract DummyL2NativeTokenVault { + constructor( + uint256 _l1ChainId, + address _aliasedL1Governance, + bytes32 _l2TokenProxyBytecodeHash, + address _bridgedTokenBeacon, + bool _contractsDeployedAlready, + address _wethToken, + bytes32 _baseTokenAssetId + ) {} +} diff --git a/system-contracts/contracts/test-contracts/DummyMessageRoot.sol b/system-contracts/contracts/test-contracts/DummyMessageRoot.sol new file mode 100644 index 000000000..d49cdd50f --- /dev/null +++ b/system-contracts/contracts/test-contracts/DummyMessageRoot.sol @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.24; 
+ +contract DummyMessageRoot { + constructor(address) {} +} diff --git a/system-contracts/contracts/test-contracts/KeccakTest.sol b/system-contracts/contracts/test-contracts/KeccakTest.sol index 19ce77ea1..79581afc4 100644 --- a/system-contracts/contracts/test-contracts/KeccakTest.sol +++ b/system-contracts/contracts/test-contracts/KeccakTest.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; +pragma solidity ^0.8.20; pragma abicoder v2; import {LOAD_LATEST_RETURNDATA_INTO_ACTIVE_PTR_CALL_ADDRESS, PTR_PACK_INTO_ACTIVE_CALL_ADDRESS, SystemContractsCaller, CalldataForwardingMode, RAW_FAR_CALL_BY_REF_CALL_ADDRESS} from "../libraries/SystemContractsCaller.sol"; diff --git a/system-contracts/contracts/test-contracts/MockContract.sol b/system-contracts/contracts/test-contracts/MockContract.sol index 1505be34c..b7d9bcb55 100644 --- a/system-contracts/contracts/test-contracts/MockContract.sol +++ b/system-contracts/contracts/test-contracts/MockContract.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; contract MockContract { event Called(uint256 value, bytes data); diff --git a/system-contracts/contracts/test-contracts/SystemCaller.sol b/system-contracts/contracts/test-contracts/SystemCaller.sol index a377174ae..b51caec0b 100644 --- a/system-contracts/contracts/test-contracts/SystemCaller.sol +++ b/system-contracts/contracts/test-contracts/SystemCaller.sol @@ -1,6 +1,6 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.20; +pragma solidity 0.8.24; import {SYSTEM_CALL_CALL_ADDRESS, MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, SystemContractsCaller, CalldataForwardingMode} from "../libraries/SystemContractsCaller.sol"; import {Utils} from "../libraries/Utils.sol"; diff --git a/system-contracts/contracts/test-contracts/TransferTest.sol b/system-contracts/contracts/test-contracts/TransferTest.sol index 1342c5a6d..ca76a9932 100644 --- a/system-contracts/contracts/test-contracts/TransferTest.sol +++ 
b/system-contracts/contracts/test-contracts/TransferTest.sol @@ -1,6 +1,6 @@ -// SPDX-License-Identifier: UNLICENSED +// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; +pragma solidity ^0.8.20; contract TransferTest { function transfer(address payable to, uint256 amount, bool warmUpRecipient) public payable { diff --git a/system-contracts/foundry.toml b/system-contracts/foundry.toml new file mode 100644 index 000000000..ee3885489 --- /dev/null +++ b/system-contracts/foundry.toml @@ -0,0 +1,13 @@ +[profile.default] +src = "contracts-preprocessed" +out = "out" +libs = ["lib"] +cache_path = "cache-forge" +evm_version = "paris" +remappings = [ + "@openzeppelin/contracts/=lib/openzeppelin-contracts/contracts/", + "@openzeppelin/contracts-upgradeable/=lib/openzeppelin-contracts-upgradeable/contracts/", +] + +[profile.default.zksync] +zksolc = "1.5.0" diff --git a/system-contracts/hardhat.config.ts b/system-contracts/hardhat.config.ts index 68550e6c3..7ad59a41f 100644 --- a/system-contracts/hardhat.config.ts +++ b/system-contracts/hardhat.config.ts @@ -34,8 +34,9 @@ export default { ethNetwork: "http://localhost:8545", }, solidity: { - version: "0.8.20", + version: "0.8.24", settings: { + evmVersion: "cancun", optimizer: { enabled: true, runs: 9999999, diff --git a/system-contracts/lib/forge-std b/system-contracts/lib/forge-std new file mode 120000 index 000000000..edce15694 --- /dev/null +++ b/system-contracts/lib/forge-std @@ -0,0 +1 @@ +../../lib/forge-std \ No newline at end of file diff --git a/system-contracts/lib/openzeppelin-contracts b/system-contracts/lib/openzeppelin-contracts new file mode 120000 index 000000000..99aa45507 --- /dev/null +++ b/system-contracts/lib/openzeppelin-contracts @@ -0,0 +1 @@ +../../lib/openzeppelin-contracts \ No newline at end of file diff --git a/system-contracts/lib/openzeppelin-contracts-upgradeable b/system-contracts/lib/openzeppelin-contracts-upgradeable new file mode 120000 index 000000000..f1fc7a76a --- /dev/null +++ 
b/system-contracts/lib/openzeppelin-contracts-upgradeable @@ -0,0 +1 @@ +../../lib/openzeppelin-contracts-upgradeable \ No newline at end of file diff --git a/system-contracts/package.json b/system-contracts/package.json index 95900f654..b1010c7ab 100644 --- a/system-contracts/package.json +++ b/system-contracts/package.json @@ -4,8 +4,8 @@ "repository": "git@github.com:matter-labs/system-contracts.git", "license": "MIT", "dependencies": { - "@matterlabs/hardhat-zksync-deploy": "^0.6.5", - "@matterlabs/hardhat-zksync-solc": "^1.1.4", + "@matterlabs/hardhat-zksync-deploy": "^0.7.0", + "@matterlabs/hardhat-zksync-solc": "=1.1.4", "@matterlabs/hardhat-zksync-verify": "^1.4.3", "commander": "^9.4.1", "eslint": "^8.51.0", @@ -15,10 +15,12 @@ "fast-glob": "^3.3.2", "hardhat": "=2.22.2", "preprocess": "^3.2.0", - "zksync-ethers": "^5.9.0" + "zksync-ethers": "^5.9.0", + "@openzeppelin/contracts-upgradeable-v4": "npm:@openzeppelin/contracts-upgradeable@4.9.5", + "@openzeppelin/contracts-v4": "npm:@openzeppelin/contracts@4.9.5" }, "devDependencies": { - "@matterlabs/hardhat-zksync-chai-matchers": "^0.1.4", + "@matterlabs/hardhat-zksync-chai-matchers": "^0.2.0", "@matterlabs/hardhat-zksync-node": "^0.0.1-beta.7", "@nomicfoundation/hardhat-chai-matchers": "^1.0.3", "@nomiclabs/hardhat-ethers": "^2.0.0", @@ -62,10 +64,12 @@ "compile-yul": "ts-node scripts/compile-yul.ts", "compile-zasm": "ts-node scripts/compile-zasm.ts", "deploy-preimages": "ts-node scripts/deploy-preimages.ts", + "copy:typechain": "mkdir -p ../l2-contracts/typechain && cp ./typechain/ContractDeployerFactory.ts ../l2-contracts/typechain/", "preprocess:bootloader": "rm -rf ./bootloader/build && yarn ts-node scripts/preprocess-bootloader.ts", "preprocess:system-contracts": "rm -rf ./contracts-preprocessed && ts-node scripts/preprocess-system-contracts.ts", "verify-on-explorer": "hardhat run scripts/verify-on-explorer.ts", "test": "yarn build:test-system-contracts && hardhat test --network zkSyncTestNode", + 
"test-no-build": "hardhat test --network zkSyncTestNode", "test-node": "hardhat node-zksync --tag v0.0.1-vm1.5.0", "test:bootloader": "cd bootloader/test_infra && cargo run" } diff --git a/system-contracts/scripts/calculate-hashes.ts b/system-contracts/scripts/calculate-hashes.ts index 1fe368d75..a8fc8036f 100644 --- a/system-contracts/scripts/calculate-hashes.ts +++ b/system-contracts/scripts/calculate-hashes.ts @@ -3,7 +3,7 @@ import * as fs from "fs"; import _ from "lodash"; import os from "os"; import { join } from "path"; -import { hashBytecode } from "zksync-web3/build/src/utils"; +import { hashBytecode } from "zksync-ethers/build/utils"; type ContractDetails = { contractName: string; diff --git a/system-contracts/scripts/compile-yul.ts b/system-contracts/scripts/compile-yul.ts index 5b972ef92..67b468987 100644 --- a/system-contracts/scripts/compile-yul.ts +++ b/system-contracts/scripts/compile-yul.ts @@ -1,11 +1,23 @@ // hardhat import should be the first import in the file import type { CompilerPaths } from "./utils"; -import { spawn, compilerLocation, prepareCompilerPaths, getSolcLocation } from "./utils"; +import { + spawn, + compilerLocation, + prepareCompilerPaths, + getSolcLocation, + needsRecompilation, + setCompilationTime, +} from "./utils"; import * as fs from "fs"; import { Command } from "commander"; +import * as _path from "path"; const COMPILER_VERSION = "1.3.18"; const IS_COMPILER_PRE_RELEASE = true; +const CONTRACTS_DIR = "contracts-preprocessed"; +const BOOTLOADER_DIR = "bootloader"; +const TIMESTAMP_FILE_YUL = "last_compilation_yul.timestamp"; +const TIMESTAMP_FILE_BOOTLOADER = "last_compilation_bootloader.timestamp"; export async function compileYul(paths: CompilerPaths, file: string) { const solcCompilerPath = await getSolcLocation(); @@ -32,14 +44,34 @@ async function main() { program.version("0.1.0").name("compile yul").description("publish preimages for the L2 contracts"); program.command("compile-bootloader").action(async () => { - 
await compileYulFolder("bootloader/build"); - await compileYulFolder("bootloader/tests"); + const timestampFilePath = _path.join(process.cwd(), TIMESTAMP_FILE_BOOTLOADER); + const folderToCheck = _path.join(process.cwd(), BOOTLOADER_DIR); + + if (needsRecompilation(folderToCheck, timestampFilePath)) { + console.log("Compilation needed."); + await compileYulFolder("bootloader/build"); + await compileYulFolder("bootloader/tests"); + setCompilationTime(timestampFilePath); + } else { + console.log("Compilation not needed."); + return; + } }); program.command("compile-precompiles").action(async () => { - await compileYulFolder("contracts-preprocessed"); - await compileYulFolder("contracts-preprocessed/precompiles"); - await compileYulFolder("contracts-preprocessed/precompiles/test-contracts"); + const timestampFilePath = _path.join(process.cwd(), TIMESTAMP_FILE_YUL); + const folderToCheck = _path.join(process.cwd(), CONTRACTS_DIR); + + if (needsRecompilation(folderToCheck, timestampFilePath)) { + console.log("Compilation needed."); + await compileYulFolder("contracts-preprocessed"); + await compileYulFolder("contracts-preprocessed/precompiles"); + await compileYulFolder("contracts-preprocessed/precompiles/test-contracts"); + setCompilationTime(timestampFilePath); + } else { + console.log("Compilation not needed."); + return; + } }); await program.parseAsync(process.argv); diff --git a/system-contracts/scripts/constants.ts b/system-contracts/scripts/constants.ts index 406b4cb6e..171c2b9f3 100644 --- a/system-contracts/scripts/constants.ts +++ b/system-contracts/scripts/constants.ts @@ -174,6 +174,46 @@ export const SYSTEM_CONTRACTS: ISystemContracts = { codeName: "Create2Factory", lang: Language.Solidity, }, + L2GenesisUpgrade: { + // This is explicitly a non-system-contract address. + // We do not use the same address as create2 factories on EVM, since + // this is a zkEVM create2 factory. 
+ address: "0x0000000000000000000000000000000000010001", + codeName: "L2GenesisUpgrade", + lang: Language.Solidity, + }, + L2BridgeHub: { + // This is explicitly a non-system-contract address. + // We do not use the same address as create2 factories on EVM, since + // this is a zkEVM create2 factory. + address: "0x0000000000000000000000000000000000010002", + codeName: "Bridgehub", + lang: Language.Solidity, + }, + L2AssetRouter: { + // This is explicitly a non-system-contract address. + // We do not use the same address as create2 factories on EVM, since + // this is a zkEVM create2 factory. + address: "0x0000000000000000000000000000000000010003", + codeName: "L2AssetRouter", + lang: Language.Solidity, + }, + L2NativeTokenVault: { + // This is explicitly a non-system-contract address. + // We do not use the same address as create2 factories on EVM, since + // this is a zkEVM create2 factory. + address: "0x0000000000000000000000000000000000010004", + codeName: "L2NativeTokenVault", + lang: Language.Solidity, + }, + L2MessageRouter: { + // This is explicitly a non-system-contract address. + // We do not use the same address as create2 factories on EVM, since + // this is a zkEVM create2 factory. + address: "0x0000000000000000000000000000000000010005", + codeName: "L2MessageRouter", + lang: Language.Solidity, + }, } as const; export const EIP712_TX_ID = 113; @@ -184,7 +224,7 @@ export const EIP712_DOMAIN = { name: "zkSync", version: "2", chainId: CHAIN_ID, - // zkSync contract doesn't verify EIP712 signatures. + // ZKsync contract doesn't verify EIP712 signatures. 
}; export interface TransactionData { diff --git a/system-contracts/scripts/deploy-preimages.ts b/system-contracts/scripts/deploy-preimages.ts index 6803f9a53..0029f56a0 100644 --- a/system-contracts/scripts/deploy-preimages.ts +++ b/system-contracts/scripts/deploy-preimages.ts @@ -8,9 +8,9 @@ import { ethers } from "ethers"; import { formatUnits, parseUnits } from "ethers/lib/utils"; import * as fs from "fs"; import * as path from "path"; -import type { types } from "zksync-web3"; -import { Provider, Wallet } from "zksync-web3"; -import { hashBytecode } from "zksync-web3/build/src/utils"; +import type { types } from "zksync-ethers"; +import { Provider, Wallet } from "zksync-ethers"; +import { hashBytecode } from "zksync-ethers/build/utils"; import { Language, SYSTEM_CONTRACTS } from "./constants"; import type { Dependency, DeployedDependency } from "./utils"; import { checkMarkers, filterPublishedFactoryDeps, getBytecodes, publishFactoryDeps, readYulBytecode } from "./utils"; @@ -103,7 +103,7 @@ class ZkSyncDeployer { this.nonce += 1; } - // Returns the current default account bytecode on zkSync + // Returns the current default account bytecode on ZKsync async currentDefaultAccountBytecode(): Promise { const zkSync = await this.deployer.zkWallet.getMainContract(); return await zkSync.getL2DefaultAccountBytecodeHash(); @@ -114,7 +114,7 @@ class ZkSyncDeployer { const bytecodeHash = ethers.utils.hexlify(hashBytecode(defaultAccountBytecode)); const currentDefaultAccountBytecode = ethers.utils.hexlify(await this.currentDefaultAccountBytecode()); - // If the bytecode is not the same as the one deployed on zkSync, we need to add it to the deployment + // If the bytecode is not the same as the one deployed on ZKsync, we need to add it to the deployment if (bytecodeHash.toLowerCase() !== currentDefaultAccountBytecode) { this.defaultAccountToUpgrade = { name: DEFAULT_ACCOUNT_CONTRACT_NAME, @@ -161,7 +161,7 @@ class ZkSyncDeployer { const bytecodeHash = 
ethers.utils.hexlify(hashBytecode(bootloaderCode)); const currentBootloaderBytecode = ethers.utils.hexlify(await this.currentBootloaderBytecode()); - // If the bytecode is not the same as the one deployed on zkSync, we need to add it to the deployment + // If the bytecode is not the same as the one deployed on ZKsync, we need to add it to the deployment if (bytecodeHash.toLowerCase() !== currentBootloaderBytecode) { this.bootloaderToUpgrade = { name: BOOTLOADER_CONTRACT_NAME, diff --git a/system-contracts/scripts/preprocess-bootloader.ts b/system-contracts/scripts/preprocess-bootloader.ts index 4dbf145da..952181455 100644 --- a/system-contracts/scripts/preprocess-bootloader.ts +++ b/system-contracts/scripts/preprocess-bootloader.ts @@ -6,6 +6,7 @@ import { existsSync, mkdirSync, writeFileSync, readFileSync } from "fs"; import { render, renderFile } from "template-file"; import { utils } from "zksync-ethers"; import { getRevertSelector, getTransactionUtils } from "./constants"; +import * as fs from "node:fs"; /* eslint-disable @typescript-eslint/no-var-requires */ const preprocess = require("preprocess"); @@ -17,9 +18,16 @@ const OUTPUT_DIR = "bootloader/build"; const PREPROCCESING_MODES = ["proved_batch", "playground_batch"]; function getSelector(contractName: string, method: string): string { - const artifact = hre.artifacts.readArtifactSync(contractName); - const contractInterface = new ethers.utils.Interface(artifact.abi); - + let contractInterface; + try { + const artifact = hre.artifacts.readArtifactSync(contractName); + contractInterface = new ethers.utils.Interface(artifact.abi); + } catch (e) { + const artifact = JSON.parse( + fs.readFileSync(`zkout/${contractName}.sol/${contractName}.json`, { encoding: "utf-8" }) + ); + contractInterface = new ethers.utils.Interface(artifact.abi); + } return contractInterface.getSighash(method); } @@ -33,6 +41,7 @@ function padZeroRight(hexData: string, length: number): string { } const PADDED_SELECTOR_LENGTH = 32 * 2 + 2; 
+ function getPaddedSelector(contractName: string, method: string): string { const result = getSelector(contractName, method); @@ -40,7 +49,13 @@ function getPaddedSelector(contractName: string, method: string): string { } function getSystemContextCodeHash() { - const bytecode = hre.artifacts.readArtifactSync("SystemContext").bytecode; + let bytecode; + try { + const artifact = JSON.parse(fs.readFileSync("zkout/SystemContext.sol/SystemContext.json", { encoding: "utf-8" })); + bytecode = "0x" + artifact.bytecode.object; + } catch (e) { + bytecode = hre.artifacts.readArtifactSync("SystemContext").bytecode; + } return ethers.utils.hexlify(utils.hashBytecode(bytecode)); } diff --git a/system-contracts/scripts/preprocess-system-contracts.ts b/system-contracts/scripts/preprocess-system-contracts.ts index acecee1ac..0b3690a9e 100644 --- a/system-contracts/scripts/preprocess-system-contracts.ts +++ b/system-contracts/scripts/preprocess-system-contracts.ts @@ -3,9 +3,11 @@ import path from "path"; import { renderFile } from "template-file"; import { glob } from "fast-glob"; import { Command } from "commander"; +import { needsRecompilation, deleteDir, setCompilationTime, isFolderEmpty } from "./utils"; const CONTRACTS_DIR = "contracts"; const OUTPUT_DIR = "contracts-preprocessed"; +const TIMESTAMP_FILE = "last_compilation_preprocessing.timestamp"; // File to store the last compilation time const params = { SYSTEM_CONTRACTS_OFFSET: "0x8000", @@ -17,6 +19,18 @@ async function preprocess(testMode: boolean) { params.SYSTEM_CONTRACTS_OFFSET = "0x9000"; } + const timestampFilePath = path.join(process.cwd(), TIMESTAMP_FILE); + const folderToCheck = path.join(process.cwd(), CONTRACTS_DIR); + + if ((await isFolderEmpty(OUTPUT_DIR)) || needsRecompilation(folderToCheck, timestampFilePath) || testMode) { + console.log("Preprocessing needed."); + deleteDir(OUTPUT_DIR); + setCompilationTime(timestampFilePath); + } else { + console.log("Preprocessing not needed."); + return; + } + const 
contracts = await glob( [`${CONTRACTS_DIR}/**/*.sol`, `${CONTRACTS_DIR}/**/*.yul`, `${CONTRACTS_DIR}/**/*.zasm`], { onlyFiles: true } diff --git a/system-contracts/scripts/utils.ts b/system-contracts/scripts/utils.ts index 2deba9ce0..4c1060ee2 100644 --- a/system-contracts/scripts/utils.ts +++ b/system-contracts/scripts/utils.ts @@ -7,7 +7,8 @@ import type { Deployer } from "@matterlabs/hardhat-zksync-deploy"; import type { BigNumberish, BytesLike } from "ethers"; import { BigNumber, ethers } from "ethers"; import * as fs from "fs"; -import { hashBytecode } from "zksync-web3/build/src/utils"; +import * as fsPr from "fs/promises"; +import { hashBytecode } from "zksync-ethers/build/utils"; import type { YulContractDescription, ZasmContractDescription } from "./constants"; import { Language, SYSTEM_CONTRACTS } from "./constants"; import { getCompilersDir } from "hardhat/internal/util/global-dir"; @@ -83,7 +84,7 @@ export async function outputSystemContracts(): Promise { return await Promise.all(upgradeParamsPromises); } -// Script that publishes preimages for all the system contracts on zkSync +// Script that publishes preimages for all the system contracts on ZKsync // and outputs the JSON that can be used for performing the necessary upgrade const DEFAULT_L2_TX_GAS_LIMIT = 2097152; @@ -257,6 +258,77 @@ export function prepareCompilerPaths(path: string): CompilerPaths { return new CompilerPaths(absolutePathSources, absolutePathArtifacts); } +// Get the latest file modification time in the watched folder +function getLatestModificationTime(folder: string): Date | null { + const files = fs.readdirSync(folder); + let latestTime: Date | null = null; // Initialize to null to avoid uninitialized variable + + files.forEach((file) => { + const filePath = path.join(folder, file); + const stats = fs.statSync(filePath); + if (stats.isDirectory()) { + const dirLatestTime = getLatestModificationTime(filePath); + if (dirLatestTime && (!latestTime || dirLatestTime > latestTime)) { 
+ latestTime = dirLatestTime; + } + } else if (stats.isFile()) { + if (!latestTime || stats.mtime > latestTime) { + latestTime = stats.mtime; + } + } + }); + + return latestTime; +} + +// Read the last compilation timestamp from the file +export function getLastCompilationTime(timestampFile: string): Date | null { + try { + if (fs.existsSync(timestampFile)) { + const timestamp = fs.readFileSync(timestampFile, "utf-8"); + return new Date(parseInt(timestamp, 10)); + } + } catch (error) { + const err = error as Error; // Cast `error` to `Error` + console.error(`Error reading timestamp: ${err.message}`); + } + return null; +} + +// Write the current time to the timestamp file +export function setCompilationTime(timestampFile: string) { + fs.writeFileSync(timestampFile, Date.now().toString()); +} + +// Determine if recompilation is needed +export function needsRecompilation(folder: string, timestampFile: string): boolean { + const lastCompilationTime = getLastCompilationTime(timestampFile); + const latestModificationTime = getLatestModificationTime(folder); + if (!lastCompilationTime) { + return true; // If there's no history, always recompile + } + + return latestModificationTime! > lastCompilationTime; +} + +export function deleteDir(path: string): void { + try { + fs.rmSync(path, { recursive: true, force: true }); // 'recursive: true' deletes all contents, 'force: true' prevents errors if the directory doesn't exist + console.log(`Directory '${path}' deleted successfully.`); + } catch (error) { + console.error(`Error deleting directory '${path}':`, error); + } +} + +export async function isFolderEmpty(folderPath: string): Promise { + try { + const files = await fsPr.readdir(folderPath); // Get a list of files in the folder + return files.length === 0; // If there are no files, the folder is empty + } catch (error) { + console.error("No target folder with artifacts."); + return true; // Return true if an error, as folder doesn't exist. 
+ } +} /** * Performs an API call to the Contract verification API. * diff --git a/system-contracts/test/AccountCodeStorage.spec.ts b/system-contracts/test/AccountCodeStorage.spec.ts index 994cfacc8..dca782e01 100644 --- a/system-contracts/test/AccountCodeStorage.spec.ts +++ b/system-contracts/test/AccountCodeStorage.spec.ts @@ -44,7 +44,7 @@ describe("AccountCodeStorage tests", function () { it("non-deployer failed to call", async () => { await expect( accountCodeStorage.storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH) - ).to.be.revertedWith("Callable only by the deployer system contract"); + ).to.be.revertedWithCustomError(accountCodeStorage, "Unauthorized"); }); it("failed to set with constructed bytecode", async () => { @@ -52,7 +52,7 @@ describe("AccountCodeStorage tests", function () { accountCodeStorage .connect(deployerAccount) .storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTED_BYTECODE_HASH) - ).to.be.revertedWith("Code hash is not for a contract on constructor"); + ).to.be.revertedWithCustomError(accountCodeStorage, "InvalidCodeHash"); }); it("successfully stored", async () => { @@ -72,7 +72,7 @@ describe("AccountCodeStorage tests", function () { it("non-deployer failed to call", async () => { await expect( accountCodeStorage.storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH) - ).to.be.revertedWith("Callable only by the deployer system contract"); + ).to.be.revertedWithCustomError(accountCodeStorage, "Unauthorized"); }); it("failed to set with constructing bytecode", async () => { @@ -80,7 +80,7 @@ describe("AccountCodeStorage tests", function () { accountCodeStorage .connect(deployerAccount) .storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH) - ).to.be.revertedWith("Code hash is not for a constructed contract"); + ).to.be.revertedWithCustomError(accountCodeStorage, "InvalidCodeHash"); }); it("successfully stored", async () => { @@ -96,8 +96,9 @@ 
describe("AccountCodeStorage tests", function () { describe("markAccountCodeHashAsConstructed", function () { it("non-deployer failed to call", async () => { - await expect(accountCodeStorage.markAccountCodeHashAsConstructed(RANDOM_ADDRESS)).to.be.revertedWith( - "Callable only by the deployer system contract" + await expect(accountCodeStorage.markAccountCodeHashAsConstructed(RANDOM_ADDRESS)).to.be.revertedWithCustomError( + accountCodeStorage, + "Unauthorized" ); }); @@ -108,7 +109,7 @@ describe("AccountCodeStorage tests", function () { await expect( accountCodeStorage.connect(deployerAccount).markAccountCodeHashAsConstructed(RANDOM_ADDRESS) - ).to.be.revertedWith("Code hash is not for a contract on constructor"); + ).to.be.revertedWithCustomError(accountCodeStorage, "InvalidCodeHash"); await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); }); diff --git a/system-contracts/test/BootloaderUtilities.spec.ts b/system-contracts/test/BootloaderUtilities.spec.ts index 998b98e8b..7c3c8ed69 100644 --- a/system-contracts/test/BootloaderUtilities.spec.ts +++ b/system-contracts/test/BootloaderUtilities.spec.ts @@ -84,7 +84,10 @@ describe("BootloaderUtilities tests", function () { signature[64] = 29; txData.signature = signature; - await expect(bootloaderUtilities.getTransactionHashes(txData)).to.be.revertedWith("Invalid v value"); + await expect(bootloaderUtilities.getTransactionHashes(txData)).to.be.revertedWithCustomError( + bootloaderUtilities, + "InvalidSig" + ); }); }); @@ -130,7 +133,10 @@ describe("BootloaderUtilities tests", function () { signature[64] = 0; EIP1559TxData.signature = signature; - await expect(bootloaderUtilities.getTransactionHashes(EIP1559TxData)).to.be.revertedWith("Invalid v value"); + await expect(bootloaderUtilities.getTransactionHashes(EIP1559TxData)).to.be.revertedWithCustomError( + bootloaderUtilities, + "InvalidSig" + ); }); }); @@ -176,7 +182,10 @@ describe("BootloaderUtilities tests", function () { signature[64] = 100; 
EIP2930TxData.signature = signature; - await expect(bootloaderUtilities.getTransactionHashes(EIP2930TxData)).to.be.revertedWith("Invalid v value"); + await expect(bootloaderUtilities.getTransactionHashes(EIP2930TxData)).to.be.revertedWithCustomError( + bootloaderUtilities, + "InvalidSig" + ); }); }); }); diff --git a/system-contracts/test/CodeOracle.spec.ts b/system-contracts/test/CodeOracle.spec.ts index b4df2ceaf..d9b0c3781 100644 --- a/system-contracts/test/CodeOracle.spec.ts +++ b/system-contracts/test/CodeOracle.spec.ts @@ -1,4 +1,4 @@ -import { hashBytecode } from "zksync-web3/build/src/utils"; +import { hashBytecode } from "zksync-ethers/build/utils"; import type { CodeOracleTest } from "../typechain"; import { REAL_CODE_ORACLE_CONTRACT_ADDRESS } from "./shared/constants"; import { publishBytecode, setCode, getCode, deployContract } from "./shared/utils"; diff --git a/system-contracts/test/ComplexUpgrader.spec.ts b/system-contracts/test/ComplexUpgrader.spec.ts index 63b4a61eb..e9104e010 100644 --- a/system-contracts/test/ComplexUpgrader.spec.ts +++ b/system-contracts/test/ComplexUpgrader.spec.ts @@ -18,8 +18,9 @@ describe("ComplexUpgrader tests", function () { describe("upgrade", function () { it("non force deployer failed to call", async () => { - await expect(complexUpgrader.upgrade(dummyUpgrade.address, "0xdeadbeef")).to.be.revertedWith( - "Can only be called by FORCE_DEPLOYER" + await expect(complexUpgrader.upgrade(dummyUpgrade.address, "0xdeadbeef")).to.be.revertedWithCustomError( + complexUpgrader, + "Unauthorized" ); }); diff --git a/system-contracts/test/Compressor.spec.ts b/system-contracts/test/Compressor.spec.ts index 094eddd99..184d675c8 100644 --- a/system-contracts/test/Compressor.spec.ts +++ b/system-contracts/test/Compressor.spec.ts @@ -46,8 +46,9 @@ describe("Compressor tests", function () { describe("publishCompressedBytecode", function () { it("should revert when it's a non-bootloader call", async () => { - await 
expect(compressor.publishCompressedBytecode("0x", "0x0000")).to.be.revertedWith( - "Callable only by the bootloader" + await expect(compressor.publishCompressedBytecode("0x", "0x0000")).to.be.revertedWithCustomError( + compressor, + "CallerMustBeBootloader" ); }); @@ -57,7 +58,7 @@ describe("Compressor tests", function () { const COMPRESSED_BYTECODE = "0x0002" + "deadbeefdeadbeef" + "0000" + "0000" + "0000" + "0000"; await expect( compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("Encoded data length should be 4 times shorter than the original bytecode"); + ).to.be.revertedWithCustomError(compressor, "EncodedLengthNotFourTimesSmallerThanOriginal"); }); it("should revert when there is no encoded data", async () => { @@ -66,7 +67,7 @@ describe("Compressor tests", function () { const COMPRESSED_BYTECODE = "0x0002" + "deadbeefdeadbeef" + "deadbeefdeadbeef"; await expect( compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("Encoded data length should be 4 times shorter than the original bytecode"); + ).to.be.revertedWithCustomError(compressor, "EncodedLengthNotFourTimesSmallerThanOriginal"); }); it("should revert when the encoded data length is invalid", async () => { @@ -80,7 +81,7 @@ describe("Compressor tests", function () { // The length of the encodedData should be 32 / 4 = 8 bytes await expect( compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("Encoded data length should be 4 times shorter than the original bytecode"); + ).to.be.revertedWithCustomError(compressor, "EncodedLengthNotFourTimesSmallerThanOriginal"); }); it("should revert when the dictionary has too many entries", async () => { @@ -101,7 +102,7 @@ describe("Compressor tests", function () { // The dictionary should have at most encode data length entries await expect( 
compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("Dictionary should have at most the same number of entries as the encoded data"); + ).to.be.revertedWithCustomError(compressor, "DictionaryDividedByEightNotGreaterThanEncodedDividedByTwo"); }); it("should revert when the encoded data has chunks where index is out of bounds", async () => { @@ -112,7 +113,7 @@ describe("Compressor tests", function () { // The dictionary has only 1 entry, so at the last entry of the encoded data the chunk index is out of bounds await expect( compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("Encoded chunk index is out of bounds"); + ).to.be.revertedWithCustomError(compressor, "IndexOutOfBounds"); }); it("should revert when the encoded data has chunks that does not match the original bytecode", async () => { @@ -122,7 +123,7 @@ describe("Compressor tests", function () { "0x0002" + "deadbeefdeadbeef" + "1111111111111111" + "0001" + "0000" + "0000" + "0001"; await expect( compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("Encoded chunk does not match the original bytecode"); + ).to.be.revertedWithCustomError(compressor, "EncodedAndRealBytecodeChunkNotEqual"); }); it("should revert when the bytecode length in bytes is invalid", async () => { @@ -131,7 +132,7 @@ describe("Compressor tests", function () { const COMPRESSED_BYTECODE = "0x0001" + "deadbeefdeadbeef" + "0000" + "0000" + "0000"; await expect( compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("po"); + ).to.be.revertedWithCustomError(compressor, "MalformedBytecode"); }); it("should revert when the bytecode length in words is odd", async () => { @@ -140,7 +141,7 @@ describe("Compressor tests", function () { const COMPRESSED_BYTECODE = "0x0001" + "deadbeefdeadbeef" + 
"0000".repeat(4 * 2); await expect( compressor.connect(bootloaderAccount).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) - ).to.be.revertedWith("pr"); + ).to.be.revertedWithCustomError(compressor, "MalformedBytecode"); }); // Test case with too big bytecode is unrealistic because API cannot accept so much data. @@ -182,12 +183,6 @@ describe("Compressor tests", function () { }); describe("verifyCompressedStateDiffs", function () { - it("non l1 messenger failed to call", async () => { - await expect(compressor.verifyCompressedStateDiffs(0, 8, "0x", "0x0000")).to.be.revertedWith( - "Inappropriate caller" - ); - }); - it("enumeration index size is too large", async () => { const stateDiffs = [ { @@ -202,7 +197,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(9, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(1, 9, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("enumeration index size is too large"); + ).to.be.revertedWithCustomError(compressor, "IndexSizeError"); }); it("initial write key mismatch", async () => { @@ -219,7 +214,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(4, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(1, 4, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("iw: initial key mismatch"); + ).to.be.revertedWithCustomError(compressor, "DerivedKeyNotEqualToCompressedValue"); }); it("repeated write key mismatch", async () => { @@ -236,7 +231,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(8, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(1, 8, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("rw: enum key mismatch"); + ).to.be.revertedWithCustomError(compressor, "CompressorEnumIndexNotEqual"); }); it("no compression value 
mismatch", async () => { @@ -259,7 +254,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(3, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(2, 3, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("transform or no compression: compressed and final mismatch"); + ).to.be.revertedWithCustomError(compressor, "CompressionValueTransformError"); }); it("transform value mismatch", async () => { @@ -282,7 +277,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(1, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(2, 1, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("transform or no compression: compressed and final mismatch"); + ).to.be.revertedWithCustomError(compressor, "CompressionValueTransformError"); }); it("add value mismatch", async () => { @@ -299,7 +294,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(1, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(1, 1, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("add: initial plus converted not equal to final"); + ).to.be.revertedWithCustomError(compressor, "CompressionValueAddError"); }); it("sub value mismatch", async () => { @@ -316,7 +311,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(1, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(1, 1, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("sub: initial minus converted not equal to final"); + ).to.be.revertedWithCustomError(compressor, "CompressionValueSubError"); }); it("invalid operation", async () => { @@ -335,7 +330,7 @@ describe("Compressor tests", function () { compressedStateDiffs = compressedStateDiffsCharArray.join(""); await expect( 
compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(1, 1, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("unsupported operation"); + ).to.be.revertedWithCustomError(compressor, "UnsupportedOperation"); }); it("Incorrect number of initial storage diffs", async () => { @@ -363,7 +358,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(1, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(2, 1, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("Incorrect number of initial storage diffs"); + ).to.be.revertedWithCustomError(compressor, "CompressorInitialWritesProcessedNotEqual"); }); it("Extra data in compressed state diffs", async () => { @@ -391,7 +386,7 @@ describe("Compressor tests", function () { const compressedStateDiffs = compressStateDiffs(1, stateDiffs); await expect( compressor.connect(l1MessengerAccount).verifyCompressedStateDiffs(2, 1, encodedStateDiffs, compressedStateDiffs) - ).to.be.revertedWith("Extra data in _compressedStateDiffs"); + ).to.be.revertedWithCustomError(compressor, "StateDiffLengthMismatch"); }); it("successfully verified", async () => { diff --git a/system-contracts/test/ContractDeployer.spec.ts b/system-contracts/test/ContractDeployer.spec.ts index 6f8984eae..bcae882c0 100644 --- a/system-contracts/test/ContractDeployer.spec.ts +++ b/system-contracts/test/ContractDeployer.spec.ts @@ -68,8 +68,9 @@ describe("ContractDeployer tests", function () { describe("updateAccountVersion", function () { it("non system call failed", async () => { - await expect(contractDeployer.updateAccountVersion(AA_VERSION_NONE)).to.be.revertedWith( - "This method require system call flag" + await expect(contractDeployer.updateAccountVersion(AA_VERSION_NONE)).to.be.revertedWithCustomError( + contractDeployer, + "SystemCallFlagRequired" ); }); @@ -96,8 +97,9 @@ describe("ContractDeployer tests", function () { 
describe("updateNonceOrdering", function () { it("non system call failed", async () => { - await expect(contractDeployer.updateNonceOrdering(NONCE_ORDERING_SEQUENTIAL)).to.be.revertedWith( - "This method require system call flag" + await expect(contractDeployer.updateNonceOrdering(NONCE_ORDERING_SEQUENTIAL)).to.be.revertedWithCustomError( + contractDeployer, + "SystemCallFlagRequired" ); }); @@ -115,9 +117,9 @@ describe("ContractDeployer tests", function () { expect((await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).nonceOrdering).to.be.eq( NONCE_ORDERING_ARBITRARY ); - await expect(contractDeployerSystemCall.updateNonceOrdering(NONCE_ORDERING_SEQUENTIAL)).to.be.revertedWith( - "It is only possible to change from sequential to arbitrary ordering" - ); + await expect( + contractDeployerSystemCall.updateNonceOrdering(NONCE_ORDERING_SEQUENTIAL) + ).to.be.revertedWithCustomError(contractDeployer, "InvalidNonceOrderingChange"); }); }); @@ -233,7 +235,7 @@ describe("ContractDeployer tests", function () { "0x", AA_VERSION_NONE ) - ).to.be.revertedWith("This method require system call flag"); + ).to.be.revertedWithCustomError(contractDeployer, "SystemCallFlagRequired"); }); it("zero bytecode hash failed", async () => { @@ -244,7 +246,7 @@ describe("ContractDeployer tests", function () { "0x", AA_VERSION_NONE ) - ).to.be.revertedWith("BytecodeHash cannot be zero"); + ).to.be.revertedWithCustomError(contractDeployer, "EmptyBytes32"); }); it("not known bytecode hash failed", async () => { @@ -261,7 +263,7 @@ describe("ContractDeployer tests", function () { "0x", AA_VERSION_NONE ) - ).to.be.revertedWith("The code hash is not known"); + ).to.be.revertedWithCustomError(contractDeployer, "UnknownCodeHash"); }); // TODO: other mock events can be checked as well @@ -344,7 +346,7 @@ describe("ContractDeployer tests", function () { "0xdeadbeef", AA_VERSION_NONE ) - ).to.be.revertedWith("This method require system call flag"); + 
).to.be.revertedWithCustomError(contractDeployer, "SystemCallFlagRequired"); }); it("zero bytecode hash failed", async () => { @@ -355,7 +357,7 @@ describe("ContractDeployer tests", function () { "0x", AA_VERSION_NONE ) - ).to.be.revertedWith("BytecodeHash cannot be zero"); + ).to.be.revertedWithCustomError(contractDeployerSystemCall, "EmptyBytes32"); }); it("not known bytecode hash failed", async () => { @@ -386,7 +388,7 @@ describe("ContractDeployer tests", function () { "0x", AA_VERSION_NONE ) - ).to.be.revertedWith("The code hash is not known"); + ).to.be.revertedWithCustomError(contractDeployerSystemCall, "UnknownCodeHash"); }); it("successfully deployed", async () => { @@ -419,7 +421,7 @@ describe("ContractDeployer tests", function () { "0xdeadbeef", AA_VERSION_NONE ) - ).to.be.revertedWith("Code hash is non-zero"); + ).to.be.revertedWithCustomError(contractDeployerSystemCall, "HashIsNonZero"); await setResult("AccountCodeStorage", "getCodeHash", [expectedAddress], { failure: false, returnData: ethers.constants.HashZero, @@ -477,7 +479,7 @@ describe("ContractDeployer tests", function () { it("non system call failed", async () => { await expect( contractDeployer.create(ethers.constants.HashZero, utils.hashBytecode(deployableArtifact.bytecode), "0x") - ).to.be.revertedWith("This method require system call flag"); + ).to.be.revertedWithCustomError(contractDeployer, "SystemCallFlagRequired"); }); it("successfully deployed", async () => { @@ -534,7 +536,7 @@ describe("ContractDeployer tests", function () { it("non system call failed", async () => { await expect( contractDeployer.create2(ethers.constants.HashZero, utils.hashBytecode(deployableArtifact.bytecode), "0xabcd") - ).to.be.revertedWith("This method require system call flag"); + ).to.be.revertedWithCustomError(contractDeployer, "SystemCallFlagRequired"); }); it("successfully deployed", async () => { @@ -564,8 +566,9 @@ describe("ContractDeployer tests", function () { value: 0, input: "0x", }; - await 
expect(contractDeployer.forceDeployOnAddress(deploymentData, wallet.address)).to.be.revertedWith( - "Callable only by self" + await expect(contractDeployer.forceDeployOnAddress(deploymentData, wallet.address)).to.be.revertedWithCustomError( + contractDeployer, + "Unauthorized" ); }); @@ -585,7 +588,7 @@ describe("ContractDeployer tests", function () { }; await expect( contractDeployer.connect(deployerAccount).forceDeployOnAddress(deploymentData, wallet.address) - ).to.be.revertedWith("The code hash is not known"); + ).to.be.revertedWithCustomError(contractDeployerSystemCall, "UnknownCodeHash"); }); it("successfully deployed", async () => { @@ -628,8 +631,9 @@ describe("ContractDeployer tests", function () { input: "0xab", }, ]; - await expect(contractDeployer.forceDeployOnAddresses(deploymentData)).to.be.revertedWith( - "Can only be called by FORCE_DEPLOYER or COMPLEX_UPGRADER_CONTRACT" + await expect(contractDeployer.forceDeployOnAddresses(deploymentData)).to.be.revertedWithCustomError( + contractDeployer, + "Unauthorized" ); }); diff --git a/system-contracts/test/DefaultAccount.spec.ts b/system-contracts/test/DefaultAccount.spec.ts index 77dafa1ed..9f3d380d3 100644 --- a/system-contracts/test/DefaultAccount.spec.ts +++ b/system-contracts/test/DefaultAccount.spec.ts @@ -2,7 +2,7 @@ import { expect } from "chai"; import { ethers, network } from "hardhat"; import type { Wallet } from "zksync-ethers"; import * as zksync from "zksync-ethers"; -import { serialize } from "zksync-web3/build/src/utils"; +import { serialize } from "zksync-ethers/build/utils"; import type { DefaultAccount, DelegateCaller, MockContract } from "../typechain"; import { DefaultAccountFactory } from "../typechain"; import { TEST_BOOTLOADER_FORMAL_ADDRESS } from "./shared/constants"; diff --git a/system-contracts/test/EventWriter.spec.ts b/system-contracts/test/EventWriter.spec.ts index 072f8e35b..35c5d66f7 100644 --- a/system-contracts/test/EventWriter.spec.ts +++ 
b/system-contracts/test/EventWriter.spec.ts @@ -2,7 +2,7 @@ import { expect } from "chai"; import { ethers } from "hardhat"; import type { Wallet } from "zksync-ethers"; import { Contract } from "zksync-ethers"; -import type { TransactionResponse } from "zksync-web3/build/src/types"; +import type { TransactionResponse } from "zksync-ethers/build/types"; import { ONE_BYTES32_HEX, REAL_EVENT_WRITER_CONTRACT_ADDRESS } from "./shared/constants"; import { EXTRA_ABI_CALLER_ADDRESS, encodeExtraAbiCallerCalldata } from "./shared/extraAbiCaller"; import { getCode, getWallets, loadYulBytecode, loadZasmBytecode, setCode } from "./shared/utils"; diff --git a/system-contracts/test/ImmutableSimulator.spec.ts b/system-contracts/test/ImmutableSimulator.spec.ts index 530fa370c..0adce4c2e 100644 --- a/system-contracts/test/ImmutableSimulator.spec.ts +++ b/system-contracts/test/ImmutableSimulator.spec.ts @@ -31,8 +31,9 @@ describe("ImmutableSimulator tests", function () { describe("setImmutables", function () { it("non-deployer failed to call", async () => { - await expect(immutableSimulator.setImmutables(RANDOM_ADDRESS, IMMUTABLES_DATA)).to.be.revertedWith( - "Callable only by the deployer system contract" + await expect(immutableSimulator.setImmutables(RANDOM_ADDRESS, IMMUTABLES_DATA)).to.be.revertedWithCustomError( + immutableSimulator, + "Unauthorized" ); }); diff --git a/system-contracts/test/KnownCodesStorage.spec.ts b/system-contracts/test/KnownCodesStorage.spec.ts index 9558f85dc..36a034cb7 100644 --- a/system-contracts/test/KnownCodesStorage.spec.ts +++ b/system-contracts/test/KnownCodesStorage.spec.ts @@ -50,21 +50,22 @@ describe("KnownCodesStorage tests", function () { describe("markBytecodeAsPublished", function () { it("non-compressor failed to call", async () => { - await expect(knownCodesStorage.markBytecodeAsPublished(BYTECODE_HASH_1)).to.be.revertedWith( - "Callable only by the compressor" + await 
expect(knownCodesStorage.markBytecodeAsPublished(BYTECODE_HASH_1)).to.be.revertedWithCustomError( + knownCodesStorage, + "Unauthorized" ); }); it("incorrectly formatted bytecode hash failed to call", async () => { await expect( knownCodesStorage.connect(compressorAccount).markBytecodeAsPublished(INCORRECTLY_FORMATTED_HASH) - ).to.be.revertedWith("Incorrectly formatted bytecodeHash"); + ).to.be.revertedWithCustomError(knownCodesStorage, "MalformedBytecode"); }); it("invalid length bytecode hash failed to call", async () => { await expect( knownCodesStorage.connect(compressorAccount).markBytecodeAsPublished(INVALID_LENGTH_HASH) - ).to.be.revertedWith("Code length in words must be odd"); + ).to.be.revertedWithCustomError(knownCodesStorage, "MalformedBytecode"); }); it("successfully marked", async () => { @@ -85,9 +86,9 @@ describe("KnownCodesStorage tests", function () { describe("markFactoryDeps", function () { it("non-bootloader failed to call", async () => { - await expect(knownCodesStorage.markFactoryDeps(false, [BYTECODE_HASH_2, BYTECODE_HASH_3])).to.be.revertedWith( - "Callable only by the bootloader" - ); + await expect( + knownCodesStorage.markFactoryDeps(false, [BYTECODE_HASH_2, BYTECODE_HASH_3]) + ).to.be.revertedWithCustomError(knownCodesStorage, "CallerMustBeBootloader"); }); it("incorrectly formatted bytecode hash failed to call", async () => { @@ -95,13 +96,13 @@ describe("KnownCodesStorage tests", function () { knownCodesStorage .connect(bootloaderAccount) .markFactoryDeps(true, [BYTECODE_HASH_2, INCORRECTLY_FORMATTED_HASH]) - ).to.be.revertedWith("Incorrectly formatted bytecodeHash"); + ).to.be.revertedWithCustomError(knownCodesStorage, "MalformedBytecode"); }); it("invalid length bytecode hash failed to call", async () => { await expect( knownCodesStorage.connect(bootloaderAccount).markFactoryDeps(false, [INVALID_LENGTH_HASH, BYTECODE_HASH_3]) - ).to.be.revertedWith("Code length in words must be odd"); + 
).to.be.revertedWithCustomError(knownCodesStorage, "MalformedBytecode"); }); it("successfully marked", async () => { diff --git a/system-contracts/test/L1Messenger.spec.ts b/system-contracts/test/L1Messenger.spec.ts index 74f16fc10..225b197d6 100644 --- a/system-contracts/test/L1Messenger.spec.ts +++ b/system-contracts/test/L1Messenger.spec.ts @@ -1,31 +1,33 @@ import { ethers, network } from "hardhat"; import type { L1Messenger } from "../typechain"; +import { IL2DAValidatorFactory } from "../typechain/IL2DAValidatorFactory"; import { L1MessengerFactory } from "../typechain"; import { prepareEnvironment, setResult } from "./shared/mocks"; -import type { StateDiff } from "./shared/utils"; -import { compressStateDiffs, deployContractOnAddress, encodeStateDiffs, getCode, getWallets } from "./shared/utils"; -import { utils } from "zksync-web3"; -import type { Wallet } from "zksync-web3"; +import { deployContractOnAddress, getCode, getWallets } from "./shared/utils"; +import { utils, L2VoidSigner } from "zksync-ethers"; +import type { Wallet } from "zksync-ethers"; import { TEST_KNOWN_CODE_STORAGE_CONTRACT_ADDRESS, TEST_L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, TEST_BOOTLOADER_FORMAL_ADDRESS, - TWO_IN_256, } from "./shared/constants"; import { expect } from "chai"; -import { BigNumber } from "ethers"; import { randomBytes } from "crypto"; +const EXPECTED_DA_INPUT_OFFSET = 160; +const L2_TO_L1_LOGS_MERKLE_TREE_LEAVES = 16_384; +const L2_TO_L1_LOG_SERIALIZE_SIZE = 88; +const L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH = "0x72abee45b59e344af8a6e520241c4744aff26ed411f4c4b00f8af09adada43ba"; + describe("L1Messenger tests", () => { let l1Messenger: L1Messenger; let wallet: Wallet; let l1MessengerAccount: ethers.Signer; let knownCodeStorageAccount: ethers.Signer; let bootloaderAccount: ethers.Signer; - let stateDiffsSetupData: StateDiffSetupData; let logData: LogData; - let bytecodeData: ContentLengthPair; let emulator: L1MessengerPubdataEmulator; + let bytecode; before(async () => { await 
prepareEnvironment(); @@ -36,13 +38,16 @@ describe("L1Messenger tests", () => { knownCodeStorageAccount = await ethers.getImpersonatedSigner(TEST_KNOWN_CODE_STORAGE_CONTRACT_ADDRESS); bootloaderAccount = await ethers.getImpersonatedSigner(TEST_BOOTLOADER_FORMAL_ADDRESS); // setup - stateDiffsSetupData = await setupStateDiffs(); logData = setupLogData(l1MessengerAccount, l1Messenger); - bytecodeData = await setupBytecodeData(ethers.constants.AddressZero); + bytecode = await getCode(TEST_L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS); await setResult("SystemContext", "txNumberInBlock", [], { failure: false, returnData: ethers.utils.defaultAbiCoder.encode(["uint16"], [1]), }); + await setResult("IMessageRoot", "getAggregatedRoot", [], { + failure: false, + returnData: ethers.constants.HashZero, + }); emulator = new L1MessengerPubdataEmulator(); }); @@ -50,7 +55,10 @@ describe("L1Messenger tests", () => { // cleaning the state of l1Messenger await l1Messenger .connect(bootloaderAccount) - .publishPubdataAndClearState(emulator.buildTotalL2ToL1PubdataAndStateDiffs()); + .publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger) + ); await network.provider.request({ method: "hardhat_stopImpersonatingAccount", params: [TEST_L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS], @@ -73,23 +81,15 @@ describe("L1Messenger tests", () => { emulator.addLog(logData.logs[0].log); await (await l1Messenger.connect(l1MessengerAccount).sendToL1(logData.messages[0].message)).wait(); emulator.addLog(logData.messages[0].log); - emulator.addMessage({ - lengthBytes: logData.messages[0].currentMessageLengthBytes, - content: logData.messages[0].message, - }); - await ( - await l1Messenger - .connect(knownCodeStorageAccount) - .requestBytecodeL1Publication(await ethers.utils.hexlify(utils.hashBytecode(bytecodeData.content)), { - gasLimit: 130000000, - }) - ).wait(); - emulator.addBytecode(bytecodeData); - 
emulator.setStateDiffsSetupData(stateDiffsSetupData); + await ( await l1Messenger .connect(bootloaderAccount) - .publishPubdataAndClearState(emulator.buildTotalL2ToL1PubdataAndStateDiffs(), { gasLimit: 1000000000 }) + .publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger), + { gasLimit: 1000000000 } + ) ).wait(); }); @@ -98,8 +98,22 @@ describe("L1Messenger tests", () => { await expect( l1Messenger .connect(bootloaderAccount) - .publishPubdataAndClearState(emulator.buildTotalL2ToL1PubdataAndStateDiffs({ numberOfLogs: 0x4002 })) - ).to.be.rejectedWith("Too many L2->L1 logs"); + .publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, { numberOfLogs: 0x4002 }) + ) + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); + }); + + it("should revert Invalid input DA signature", async () => { + await expect( + l1Messenger + .connect(bootloaderAccount) + .publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, { l2DaValidatorFunctionSig: "0x12121212" }) + ) + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); }); it("should revert logshashes mismatch", async () => { @@ -120,56 +134,78 @@ describe("L1Messenger tests", () => { await expect( l1Messenger .connect(bootloaderAccount) - .publishPubdataAndClearState(emulator.buildTotalL2ToL1PubdataAndStateDiffs(overrideData)) - ).to.be.rejectedWith("reconstructedChainedLogsHash is not equal to chainedLogsHash"); + .publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, overrideData) + ) + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); }); - it("should revert chainedMessageHash mismatch", async () => { - // Buffer.alloc(32, 6), to trigger the revert - const wrongMessage = { 
lengthBytes: logData.messages[0].currentMessageLengthBytes, content: Buffer.alloc(32, 6) }; - const overrideData = { messages: [...emulator.messages] }; - overrideData.messages[0] = wrongMessage; + it("should revert Invalid input msgs hash", async () => { + const correctChainedMessagesHash = await l1Messenger.provider.getStorageAt(l1Messenger.address, 2); + await expect( - l1Messenger - .connect(bootloaderAccount) - .publishPubdataAndClearState(emulator.buildTotalL2ToL1PubdataAndStateDiffs(overrideData)) - ).to.be.rejectedWith("reconstructedChainedMessagesHash is not equal to chainedMessagesHash"); + l1Messenger.connect(bootloaderAccount).publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, { + chainedMessagesHash: ethers.utils.keccak256(correctChainedMessagesHash), + }) + ) + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); }); - it("should revert state diff compression version mismatch", async () => { - await ( - await l1Messenger - .connect(knownCodeStorageAccount) - .requestBytecodeL1Publication(await ethers.utils.hexlify(utils.hashBytecode(bytecodeData.content)), { - gasLimit: 130000000, + it("should revert Invalid bytecodes hash", async () => { + const correctChainedBytecodesHash = await l1Messenger.provider.getStorageAt(l1Messenger.address, 3); + + await expect( + l1Messenger.connect(bootloaderAccount).publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, { + chainedBytecodeHash: ethers.utils.keccak256(correctChainedBytecodesHash), }) - ).wait(); - // modify version to trigger the revert + ) + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); + }); + + it("should revert Invalid offset", async () => { await expect( l1Messenger.connect(bootloaderAccount).publishPubdataAndClearState( - emulator.buildTotalL2ToL1PubdataAndStateDiffs({ - version: 
ethers.utils.hexZeroPad(ethers.utils.hexlify(66), 1), + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, { + operatorDataOffset: EXPECTED_DA_INPUT_OFFSET + 1, }) ) - ).to.be.rejectedWith("state diff compression version mismatch"); + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); }); - it("should revert extra data", async () => { - // add extra data to trigger the revert + it("should revert Invalid length", async () => { await expect( l1Messenger .connect(bootloaderAccount) .publishPubdataAndClearState( - ethers.utils.concat([emulator.buildTotalL2ToL1PubdataAndStateDiffs(), Buffer.alloc(1, 64)]) + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, { operatorDataLength: 1 }) ) - ).to.be.rejectedWith("Extra data in the totalL2ToL1Pubdata array"); + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); + }); + + it("should revert Invalid root hash", async () => { + await expect( + l1Messenger.connect(bootloaderAccount).publishPubdataAndClearState( + ethers.constants.AddressZero, + await emulator.buildTotalL2ToL1PubdataAndStateDiffs(l1Messenger, { + chainedLogsRootHash: ethers.constants.HashZero, + }) + ) + ).to.be.revertedWithCustomError(l1Messenger, "ReconstructionMismatch"); }); }); describe("sendL2ToL1Log", async () => { it("should revert when not called by the system contract", async () => { - await expect(l1Messenger.sendL2ToL1Log(true, logData.key, logData.value)).to.be.rejectedWith( - "This method require the caller to be system contract" + await expect(l1Messenger.sendL2ToL1Log(true, logData.key, logData.value)).to.be.revertedWithCustomError( + l1Messenger, + "CallerMustBeSystemContract" ); }); @@ -234,102 +270,32 @@ describe("L1Messenger tests", () => { .and.to.emit(l1Messenger, "L2ToL1LogSent") .withArgs([0, true, 1, l1Messenger.address, expectedKey, ethers.utils.keccak256(logData.messages[0].message)]); 
emulator.addLog(logData.messages[0].log); - emulator.addMessage({ - lengthBytes: logData.messages[0].currentMessageLengthBytes, - content: logData.messages[0].message, - }); }); }); describe("requestBytecodeL1Publication", async () => { it("should revert when not called by known code storage contract", async () => { const byteCodeHash = ethers.utils.hexlify(randomBytes(32)); - await expect(l1Messenger.requestBytecodeL1Publication(byteCodeHash)).to.be.rejectedWith("Inappropriate caller"); + await expect(l1Messenger.requestBytecodeL1Publication(byteCodeHash)).to.be.revertedWithCustomError( + l1Messenger, + "Unauthorized" + ); }); it("should emit event, called by known code system contract", async () => { await expect( l1Messenger .connect(knownCodeStorageAccount) - .requestBytecodeL1Publication(await ethers.utils.hexlify(utils.hashBytecode(bytecodeData.content)), { - gasLimit: 130000000, + .requestBytecodeL1Publication(ethers.utils.hexlify(utils.hashBytecode(bytecode)), { + gasLimit: 230000000, }) ) .to.emit(l1Messenger, "BytecodeL1PublicationRequested") - .withArgs(await ethers.utils.hexlify(utils.hashBytecode(bytecodeData.content))); - emulator.addBytecode(bytecodeData); + .withArgs(ethers.utils.hexlify(utils.hashBytecode(bytecode))); }); }); }); -// Interface represents the structure of the data that that is used in totalL2ToL1PubdataAndStateDiffs. 
-interface StateDiffSetupData { - encodedStateDiffs: string; - compressedStateDiffs: string; - enumerationIndexSizeBytes: string; - numberOfStateDiffsBytes: string; - compressedStateDiffsSizeBytes: string; -} - -async function setupStateDiffs(): Promise { - const stateDiffs: StateDiff[] = [ - { - key: "0x1234567890123456789012345678901234567890123456789012345678901230", - index: 0, - initValue: BigNumber.from("0x1234567890123456789012345678901234567890123456789012345678901231"), - finalValue: BigNumber.from("0x1234567890123456789012345678901234567890123456789012345678901230"), - }, - { - key: "0x1234567890123456789012345678901234567890123456789012345678901232", - index: 1, - initValue: TWO_IN_256.sub(1), - finalValue: BigNumber.from(1), - }, - { - key: "0x1234567890123456789012345678901234567890123456789012345678901234", - index: 0, - initValue: TWO_IN_256.div(2), - finalValue: BigNumber.from(1), - }, - { - key: "0x1234567890123456789012345678901234567890123456789012345678901236", - index: 2323, - initValue: BigNumber.from("0x1234567890123456789012345678901234567890123456789012345678901237"), - finalValue: BigNumber.from("0x0239329298382323782378478237842378478237847237237872373272373272"), - }, - { - key: "0x1234567890123456789012345678901234567890123456789012345678901238", - index: 2, - initValue: BigNumber.from(0), - finalValue: BigNumber.from(1), - }, - ]; - const encodedStateDiffs = encodeStateDiffs(stateDiffs); - const compressedStateDiffs = compressStateDiffs(4, stateDiffs); - const enumerationIndexSizeBytes = ethers.utils.hexZeroPad(ethers.utils.hexlify(4), 1); - await setResult( - "Compressor", - "verifyCompressedStateDiffs", - [stateDiffs.length, 4, encodedStateDiffs, compressedStateDiffs], - { - failure: false, - returnData: ethers.utils.defaultAbiCoder.encode(["bytes32"], [ethers.utils.keccak256(encodedStateDiffs)]), - } - ); - const numberOfStateDiffsBytes = ethers.utils.hexZeroPad(ethers.utils.hexlify(stateDiffs.length), 4); - const 
compressedStateDiffsSizeBytes = ethers.utils.hexZeroPad( - ethers.utils.hexlify(ethers.utils.arrayify(compressedStateDiffs).length), - 3 - ); - return { - encodedStateDiffs, - compressedStateDiffs, - enumerationIndexSizeBytes, - numberOfStateDiffsBytes, - compressedStateDiffsSizeBytes, - }; -} - // Interface for L2ToL1Log struct. interface L2ToL1Log { l2ShardId: number; @@ -412,47 +378,34 @@ function setupLogData(l1MessengerAccount: ethers.Signer, l1Messenger: L1Messenge }; } -// Represents the structure of the bytecode/message data that is part of the pubdata. -interface ContentLengthPair { - content: string; - lengthBytes: string; -} - -async function setupBytecodeData(l1MessengerAddress: string): Promise { - const content = await getCode(l1MessengerAddress); - const lengthBytes = ethers.utils.hexZeroPad(ethers.utils.hexlify(ethers.utils.arrayify(content).length), 4); - return { - content, - lengthBytes, - }; -} - // Used for emulating the pubdata published by the L1Messenger. class L1MessengerPubdataEmulator implements EmulatorData { numberOfLogs: number; encodedLogs: string[]; - numberOfMessages: number; - messages: ContentLengthPair[]; - numberOfBytecodes: number; - bytecodes: ContentLengthPair[]; - stateDiffsSetupData: StateDiffSetupData; - version: string; + l2DaValidatorFunctionSig: string; + chainedLogsHash: string; + chainedLogsRootHash: string; + operatorDataOffset: number; + operatorDataLength: number; + + // These two fields are always zero, we need + // them just to extend the interface. 
+ chainedMessagesHash: string; + chainedBytecodeHash: string; constructor() { this.numberOfLogs = 0; this.encodedLogs = []; - this.numberOfMessages = 0; - this.messages = []; - this.numberOfBytecodes = 0; - this.bytecodes = []; - this.stateDiffsSetupData = { - compressedStateDiffsSizeBytes: "", - enumerationIndexSizeBytes: "", - compressedStateDiffs: "", - numberOfStateDiffsBytes: "", - encodedStateDiffs: "", - }; - this.version = ethers.utils.hexZeroPad(ethers.utils.hexlify(1), 1); + + const factoryInterface = IL2DAValidatorFactory.connect( + ethers.constants.AddressZero, + new L2VoidSigner(ethers.constants.AddressZero) + ); + this.l2DaValidatorFunctionSig = factoryInterface.interface.getSighash("validatePubdata"); + + this.chainedLogsHash = ethers.constants.HashZero; + this.chainedLogsRootHash = ethers.constants.HashZero; + this.operatorDataOffset = EXPECTED_DA_INPUT_OFFSET; } addLog(log: string): void { @@ -460,70 +413,80 @@ class L1MessengerPubdataEmulator implements EmulatorData { this.numberOfLogs++; } - addMessage(message: ContentLengthPair): void { - this.messages.push(message); - this.numberOfMessages++; - } - - addBytecode(bytecode: ContentLengthPair): void { - this.bytecodes.push(bytecode); - this.numberOfBytecodes++; - } - - setStateDiffsSetupData(data: StateDiffSetupData) { - this.stateDiffsSetupData = data; - } + async buildTotalL2ToL1PubdataAndStateDiffs( + l1Messenger: L1Messenger, + overrideData: EmulatorOverrideData = {} + ): Promise { + const storedChainedMessagesHash = await l1Messenger.provider.getStorageAt(l1Messenger.address, 2); + const storedChainedBytecodesHash = await l1Messenger.provider.getStorageAt(l1Messenger.address, 3); - buildTotalL2ToL1PubdataAndStateDiffs(overrideData: EmulatorOverrideData = {}): string { const { + l2DaValidatorFunctionSig = this.l2DaValidatorFunctionSig, + chainedLogsHash = calculateChainedLogsHash(this.encodedLogs), + chainedLogsRootHash = calculateLogsRootHash(this.encodedLogs), + chainedMessagesHash = 
storedChainedMessagesHash, + chainedBytecodeHash = storedChainedBytecodesHash, + operatorDataOffset = this.operatorDataOffset, numberOfLogs = this.numberOfLogs, encodedLogs = this.encodedLogs, - numberOfMessages = this.numberOfMessages, - messages = this.messages, - numberOfBytecodes = this.numberOfBytecodes, - bytecodes = this.bytecodes, - stateDiffsSetupData = this.stateDiffsSetupData, - version = this.version, } = overrideData; - - const messagePairs = []; - for (let i = 0; i < numberOfMessages; i++) { - messagePairs.push(messages[i].lengthBytes, messages[i].content); - } - - const bytecodePairs = []; - for (let i = 0; i < numberOfBytecodes; i++) { - bytecodePairs.push(bytecodes[i].lengthBytes, bytecodes[i].content); - } + const operatorDataLength = overrideData.operatorDataLength + ? overrideData.operatorDataLength + : numberOfLogs * L2_TO_L1_LOG_SERIALIZE_SIZE + 4; return ethers.utils.concat([ + l2DaValidatorFunctionSig, + chainedLogsHash, + chainedLogsRootHash, + chainedMessagesHash, + chainedBytecodeHash, + ethers.utils.defaultAbiCoder.encode(["uint256"], [operatorDataOffset]), + ethers.utils.defaultAbiCoder.encode(["uint256"], [operatorDataLength]), ethers.utils.hexZeroPad(ethers.utils.hexlify(numberOfLogs), 4), ...encodedLogs, - ethers.utils.hexZeroPad(ethers.utils.hexlify(numberOfMessages), 4), - ...messagePairs, - ethers.utils.hexZeroPad(ethers.utils.hexlify(numberOfBytecodes), 4), - ...bytecodePairs, - version, - stateDiffsSetupData.compressedStateDiffsSizeBytes, - stateDiffsSetupData.enumerationIndexSizeBytes, - stateDiffsSetupData.compressedStateDiffs, - stateDiffsSetupData.numberOfStateDiffsBytes, - stateDiffsSetupData.encodedStateDiffs, ]); } } // Represents the structure of the data that the emulator uses. 
interface EmulatorData { + l2DaValidatorFunctionSig: string; + chainedLogsHash: string; + chainedLogsRootHash: string; + chainedMessagesHash: string; + chainedBytecodeHash: string; + operatorDataOffset: number; + operatorDataLength: number; numberOfLogs: number; encodedLogs: string[]; - numberOfMessages: number; - messages: ContentLengthPair[]; - numberOfBytecodes: number; - bytecodes: ContentLengthPair[]; - stateDiffsSetupData: StateDiffSetupData; - version: string; } // Represents a type that allows for overriding specific properties of the EmulatorData. // This is useful when you want to change some properties of the emulator data without affecting the others. type EmulatorOverrideData = Partial; + +function calculateChainedLogsHash(logs: string[]): string { + let hash = ethers.constants.HashZero; + for (const log of logs) { + const logHash = ethers.utils.keccak256(log); + hash = ethers.utils.keccak256(ethers.utils.concat([hash, logHash])); + } + + return hash; +} + +function calculateLogsRootHash(logs: string[]): string { + const logsTreeArray: string[] = new Array(L2_TO_L1_LOGS_MERKLE_TREE_LEAVES).fill(L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH); + for (let i = 0; i < logs.length; i++) { + logsTreeArray[i] = ethers.utils.keccak256(logs[i]); + } + + let length = L2_TO_L1_LOGS_MERKLE_TREE_LEAVES; + + while (length > 1) { + for (let i = 0; i < length; i += 2) { + logsTreeArray[i / 2] = ethers.utils.keccak256(ethers.utils.concat([logsTreeArray[i], logsTreeArray[i + 1]])); + } + length /= 2; + } + return logsTreeArray[0]; +} diff --git a/system-contracts/test/L2BaseToken.spec.ts b/system-contracts/test/L2BaseToken.spec.ts index 3ef04d590..d73f0444d 100644 --- a/system-contracts/test/L2BaseToken.spec.ts +++ b/system-contracts/test/L2BaseToken.spec.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; import { ethers, network } from "hardhat"; -import type { Wallet } from "zksync-web3"; +import type { Wallet } from "zksync-ethers"; import type { L2BaseToken } from "../typechain"; 
import { L2BaseTokenFactory } from "../typechain"; import { deployContractOnAddress, getWallets, loadArtifact, provider } from "./shared/utils"; @@ -53,9 +53,9 @@ describe("L2BaseToken tests", () => { it("not called by bootloader", async () => { const amountToMint: BigNumber = ethers.utils.parseEther("10.0"); - await expect(L2BaseToken.connect(wallets[0]).mint(wallets[0].address, amountToMint)).to.be.rejectedWith( - "Callable only by the bootloader" - ); + await expect( + L2BaseToken.connect(wallets[0]).mint(wallets[0].address, amountToMint) + ).to.be.revertedWithCustomError(L2BaseToken, "CallerMustBeBootloader"); }); }); @@ -90,7 +90,7 @@ describe("L2BaseToken tests", () => { await expect( L2BaseToken.connect(bootloaderAccount).transferFromTo(wallets[0].address, wallets[1].address, amountToTransfer) - ).to.be.rejectedWith("Transfer amount exceeds balance"); + ).to.be.revertedWithCustomError(L2BaseToken, "InsufficientFunds"); }); it("no transfer - require special access", async () => { @@ -107,7 +107,7 @@ describe("L2BaseToken tests", () => { wallets[1].address, amountToTransfer ) - ).to.be.rejectedWith("Only system contracts with special access can call this method"); + ).to.be.revertedWithCustomError(L2BaseToken, "Unauthorized"); }); }); diff --git a/system-contracts/test/L2GenesisUpgrade.spec.ts b/system-contracts/test/L2GenesisUpgrade.spec.ts new file mode 100644 index 000000000..a7914f705 --- /dev/null +++ b/system-contracts/test/L2GenesisUpgrade.spec.ts @@ -0,0 +1,140 @@ +import { expect } from "chai"; +import { ethers, network } from "hardhat"; +import * as zksync from "zksync-ethers"; +import type { ComplexUpgrader, L2GenesisUpgrade } from "../typechain"; +import { ComplexUpgraderFactory, L2GenesisUpgradeFactory } from "../typechain"; +import { + TEST_L2_GENESIS_UPGRADE_CONTRACT_ADDRESS, + TEST_FORCE_DEPLOYER_ADDRESS, + REAL_L2_ASSET_ROUTER_ADDRESS, + REAL_L2_MESSAGE_ROOT_ADDRESS, + TEST_COMPLEX_UPGRADER_CONTRACT_ADDRESS, + ADDRESS_ONE, +} from 
"./shared/constants"; +import { deployContractOnAddress, loadArtifact } from "./shared/utils"; +import { setResult } from "./shared/mocks"; + +describe("L2GenesisUpgrade tests", function () { + let l2GenesisUpgrade: L2GenesisUpgrade; + let complexUpgrader: ComplexUpgrader; + const chainId = 270; + + const ctmDeployerAddress = ethers.utils.hexlify(ethers.utils.randomBytes(20)); + const bridgehubOwnerAddress = ethers.utils.hexlify(ethers.utils.randomBytes(20)); + + const forceDeployments = [ + { + bytecodeHash: "0x0100056f53fd9e940906d998a80ed53392e5c50a8eb198baf9f78fd84ce7ec70", + newAddress: "0x0000000000000000000000000000000000020002", + callConstructor: true, + value: 0, + input: "0x", + }, + ]; + + let fixedForceDeploymentsData: string; + + const additionalForceDeploymentsData = ethers.utils.defaultAbiCoder.encode( + ["tuple(bytes32 baseTokenAssetId, address l2LegacySharedBridge, address l2Weth)"], + [ + { + baseTokenAssetId: "0x0100056f53fd9e940906d998a80ed53392e5c50a8eb198baf9f78fd84ce7ec70", + l2LegacySharedBridge: ADDRESS_ONE, + l2Weth: ADDRESS_ONE, + }, + ] + ); + + before(async () => { + const wallet = await ethers.getImpersonatedSigner(TEST_FORCE_DEPLOYER_ADDRESS); + await deployContractOnAddress(TEST_COMPLEX_UPGRADER_CONTRACT_ADDRESS, "ComplexUpgrader"); + await deployContractOnAddress(TEST_L2_GENESIS_UPGRADE_CONTRACT_ADDRESS, "L2GenesisUpgrade"); + complexUpgrader = ComplexUpgraderFactory.connect(TEST_COMPLEX_UPGRADER_CONTRACT_ADDRESS, wallet); + l2GenesisUpgrade = L2GenesisUpgradeFactory.connect(TEST_L2_GENESIS_UPGRADE_CONTRACT_ADDRESS, wallet); + + await setResult( + "IBridgehub", + "setAddresses", + [REAL_L2_ASSET_ROUTER_ADDRESS, ctmDeployerAddress, REAL_L2_MESSAGE_ROOT_ADDRESS], + { + failure: false, + returnData: "0x", + } + ); + await setResult("IBridgehub", "owner", [], { + failure: false, + returnData: ethers.utils.defaultAbiCoder.encode(["address"], [bridgehubOwnerAddress]), + }); + + await setResult("SystemContext", "setChainId", [chainId], { 
+ failure: false, + returnData: "0x", + }); + + await setResult("ContractDeployer", "forceDeployOnAddresses", [forceDeployments], { + failure: false, + returnData: "0x", + }); + + const msgRootBytecode = (await loadArtifact("DummyMessageRoot")).bytecode; + const messageRootBytecodeHash = zksync.utils.hashBytecode(msgRootBytecode); + + const ntvBytecode = (await loadArtifact("DummyL2NativeTokenVault")).bytecode; + const ntvBytecodeHash = zksync.utils.hashBytecode(ntvBytecode); + + const l2AssetRouterBytecode = (await loadArtifact("DummyL2AssetRouter")).bytecode; + const l2AssetRouterBytecodeHash = zksync.utils.hashBytecode(l2AssetRouterBytecode); + + const bridgehubBytecode = (await loadArtifact("DummyBridgehub")).bytecode; + const bridgehubBytecodeHash = zksync.utils.hashBytecode(bridgehubBytecode); + + fixedForceDeploymentsData = ethers.utils.defaultAbiCoder.encode( + [ + "tuple(uint256 l1ChainId, uint256 eraChainId, address l1AssetRouter, bytes32 l2TokenProxyBytecodeHash, address aliasedL1Governance, uint256 maxNumberOfZKChains, bytes32 bridgehubBytecodeHash, bytes32 l2AssetRouterBytecodeHash, bytes32 l2NtvBytecodeHash, bytes32 messageRootBytecodeHash, address l2SharedBridgeLegacyImpl, address l2BridgedStandardERC20Impl, address l2BridgeProxyOwnerAddress, address l2BridgedStandardERC20ProxyOwnerAddress)", + ], + [ + { + l1ChainId: 1, + eraChainId: 1, + l1AssetRouter: ADDRESS_ONE, + l2TokenProxyBytecodeHash: "0x0100056f53fd9e940906d998a80ed53392e5c50a8eb198baf9f78fd84ce7ec70", + aliasedL1Governance: ADDRESS_ONE, + maxNumberOfZKChains: 100, + bridgehubBytecodeHash: bridgehubBytecodeHash, + l2AssetRouterBytecodeHash: l2AssetRouterBytecodeHash, + l2NtvBytecodeHash: ntvBytecodeHash, + messageRootBytecodeHash: messageRootBytecodeHash, + // For genesis upgrade these values will always be zero + l2SharedBridgeLegacyImpl: ethers.constants.AddressZero, + l2BridgedStandardERC20Impl: ethers.constants.AddressZero, + l2BridgeProxyOwnerAddress: ethers.constants.AddressZero, + 
l2BridgedStandardERC20ProxyOwnerAddress: ethers.constants.AddressZero, + }, + ] + ); + }); + + describe("upgrade", function () { + it("successfully upgraded", async () => { + const data = l2GenesisUpgrade.interface.encodeFunctionData("genesisUpgrade", [ + chainId, + ctmDeployerAddress, + fixedForceDeploymentsData, + additionalForceDeploymentsData, + ]); + + // Note, that the event is emitted at the complex upgrader, but the event declaration is taken from the l2GenesisUpgrade contract. + await expect(complexUpgrader.upgrade(l2GenesisUpgrade.address, data)) + .to.emit( + new ethers.Contract(complexUpgrader.address, l2GenesisUpgrade.interface, complexUpgrader.signer), + "UpgradeComplete" + ) + .withArgs(chainId); + + await network.provider.request({ + method: "hardhat_stopImpersonatingAccount", + params: [TEST_FORCE_DEPLOYER_ADDRESS], + }); + }); + }); +}); diff --git a/system-contracts/test/NonceHolder.spec.ts b/system-contracts/test/NonceHolder.spec.ts new file mode 100644 index 000000000..cbab36d21 --- /dev/null +++ b/system-contracts/test/NonceHolder.spec.ts @@ -0,0 +1,267 @@ +import { expect } from "chai"; +import type { NonceHolder } from "../typechain"; +import { NonceHolderFactory } from "../typechain"; +import { + TEST_DEPLOYER_SYSTEM_CONTRACT_ADDRESS, + TEST_NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, + TEST_SYSTEM_CONTEXT_CONTRACT_ADDRESS, +} from "./shared/constants"; +import { prepareEnvironment, setResult } from "./shared/mocks"; +import { deployContractOnAddress, getWallets } from "./shared/utils"; +import { ethers, network } from "hardhat"; +import { BigNumber } from "ethers"; + +describe("NonceHolder tests", () => { + const wallet = getWallets()[0]; + let nonceHolder: NonceHolder; + let systemAccount: ethers.Signer; + let deployerAccount: ethers.Signer; + + before(async () => { + await prepareEnvironment(); + await deployContractOnAddress(TEST_NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, "NonceHolder"); + nonceHolder = 
NonceHolderFactory.connect(TEST_NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, wallet); + + // Using a system account to satisfy the `onlySystemCall` modifier. + systemAccount = await ethers.getImpersonatedSigner(TEST_SYSTEM_CONTEXT_CONTRACT_ADDRESS); + deployerAccount = await ethers.getImpersonatedSigner(TEST_DEPLOYER_SYSTEM_CONTRACT_ADDRESS); + }); + + after(async () => { + await network.provider.request({ + method: "hardhat_stopImpersonatingAccount", + params: [TEST_SYSTEM_CONTEXT_CONTRACT_ADDRESS], + }); + await network.provider.request({ + method: "hardhat_stopImpersonatingAccount", + params: [TEST_DEPLOYER_SYSTEM_CONTRACT_ADDRESS], + }); + }); + + describe("increaseMinNonce and getters", () => { + it("should increase account minNonce by 1", async () => { + const nonceBefore = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceBefore = await nonceHolder.getRawNonce(systemAccount.address); + await nonceHolder.connect(systemAccount).increaseMinNonce(1); + const nonceAfter = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceAfter = await nonceHolder.getRawNonce(systemAccount.address); + + expect(nonceAfter).to.equal(nonceBefore.add(1)); + expect(rawNonceAfter).to.equal(rawNonceBefore.add(1)); + }); + + it("should stay the same", async () => { + const nonceBefore = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceBefore = await nonceHolder.getRawNonce(systemAccount.address); + await nonceHolder.connect(systemAccount).increaseMinNonce(0); + const nonceAfter = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceAfter = await nonceHolder.getRawNonce(systemAccount.address); + + expect(nonceBefore).to.equal(nonceAfter); + expect(rawNonceBefore).to.equal(rawNonceAfter); + }); + + it("should increase account minNonce by many", async () => { + const nonceBefore = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceBefore = await nonceHolder.getRawNonce(systemAccount.address); + await 
nonceHolder.connect(systemAccount).increaseMinNonce(2 ** 4); + const nonceAfter = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceAfter = await nonceHolder.getRawNonce(systemAccount.address); + + expect(nonceAfter).to.equal(nonceBefore.add(2 ** 4)); + expect(rawNonceAfter).to.equal(rawNonceBefore.add(2 ** 4)); + }); + + it("should fail with too high", async () => { + const nonceBefore = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceBefore = await nonceHolder.getRawNonce(systemAccount.address); + + await expect( + nonceHolder.connect(systemAccount).increaseMinNonce(BigNumber.from(2).pow(32).add(1)) + ).to.be.revertedWithCustomError(nonceHolder, "NonceIncreaseError"); + + const nonceAfter = await nonceHolder.getMinNonce(systemAccount.address); + const rawNonceAfter = await nonceHolder.getRawNonce(systemAccount.address); + + expect(nonceAfter).to.equal(nonceBefore); + expect(rawNonceAfter).to.equal(rawNonceBefore); + }); + + it("should revert This method require system call flag", async () => { + await expect(nonceHolder.increaseMinNonce(123)).to.be.revertedWithCustomError( + nonceHolder, + "SystemCallFlagRequired" + ); + }); + }); + + describe("incrementMinNonceIfEquals", async () => { + it("should revert This method require system call flag", async () => { + const expectedNonce = await nonceHolder.getMinNonce(systemAccount.address); + await expect(nonceHolder.incrementMinNonceIfEquals(expectedNonce)).to.be.revertedWithCustomError( + nonceHolder, + "SystemCallFlagRequired" + ); + }); + + it("should revert Incorrect nonce", async () => { + await expect(nonceHolder.connect(systemAccount).incrementMinNonceIfEquals(2222222)).to.be.revertedWithCustomError( + nonceHolder, + "ValueMismatch" + ); + }); + + it("should increment minNonce if equals to expected", async () => { + const expectedNonce = await nonceHolder.getMinNonce(systemAccount.address); + await 
nonceHolder.connect(systemAccount).incrementMinNonceIfEquals(expectedNonce); + const result = await nonceHolder.getMinNonce(systemAccount.address); + expect(result).to.equal(expectedNonce.add(1)); + }); + }); + + describe("incrementDeploymentNonce", async () => { + it("should revert Only the contract deployer can increment the deployment nonce", async () => { + await expect(nonceHolder.incrementDeploymentNonce(deployerAccount.address)).to.be.revertedWithCustomError( + nonceHolder, + "Unauthorized" + ); + }); + + it("should increment deployment nonce", async () => { + const nonceBefore = await nonceHolder.getDeploymentNonce(wallet.address); + const rawNonceBefore = await nonceHolder.getRawNonce(wallet.address); + await nonceHolder.connect(deployerAccount).incrementDeploymentNonce(wallet.address); + const nonceAfter = await nonceHolder.getDeploymentNonce(wallet.address); + const rawNonceAfter = await nonceHolder.getRawNonce(wallet.address); + + expect(nonceAfter).to.equal(nonceBefore.add(BigNumber.from(1))); + expect(rawNonceAfter).to.equal(rawNonceBefore.add(BigNumber.from(2).pow(128))); + }); + }); + + describe("setValueUnderNonce and getValueUnderNonce", async () => { + it("should revert Nonce value cannot be set to 0", async () => { + const accountInfo = [1, 0]; + const encodedAccountInfo = ethers.utils.defaultAbiCoder.encode(["tuple(uint8, uint8)"], [accountInfo]); + await setResult("ContractDeployer", "getAccountInfo", [systemAccount.address], { + failure: false, + returnData: encodedAccountInfo, + }); + await expect(nonceHolder.connect(systemAccount).setValueUnderNonce(124, 0)).to.be.revertedWithCustomError( + nonceHolder, + "ZeroNonceError" + ); + }); + + it("should revert Previous nonce has not been used", async () => { + const accountInfo = [1, 0]; + const encodedAccountInfo = ethers.utils.defaultAbiCoder.encode(["tuple(uint8, uint8)"], [accountInfo]); + await setResult("ContractDeployer", "getAccountInfo", [systemAccount.address], { + failure: false, + 
returnData: encodedAccountInfo, + }); + await expect(nonceHolder.connect(systemAccount).setValueUnderNonce(443, 111)).to.be.revertedWithCustomError( + nonceHolder, + "NonceJumpError" + ); + }); + + it("should emit ValueSetUnderNonce event", async () => { + const currentNonce = await nonceHolder.getMinNonce(systemAccount.address); + const valueBefore = await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce); + const value = valueBefore.add(42); + + const accountInfo = [1, 0]; + const encodedAccountInfo = ethers.utils.defaultAbiCoder.encode(["tuple(uint8, uint8)"], [accountInfo]); + await setResult("ContractDeployer", "getAccountInfo", [systemAccount.address], { + failure: false, + returnData: encodedAccountInfo, + }); + await expect(nonceHolder.connect(systemAccount).setValueUnderNonce(currentNonce, value)) + .to.emit(nonceHolder, "ValueSetUnderNonce") + .withArgs(systemAccount.address, currentNonce, value); + + const valueAfter = await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce); + expect(valueAfter).to.equal(value); + }); + + it("should emit ValueSetUnderNonce event arbitrary ordering", async () => { + const currentNonce = await nonceHolder.getMinNonce(systemAccount.address); + const encodedAccountInfo = ethers.utils.defaultAbiCoder.encode(["tuple(uint8, uint8)"], [[1, 1]]); + await setResult("ContractDeployer", "getAccountInfo", [systemAccount.address], { + failure: false, + returnData: encodedAccountInfo, + }); + + const firstValue = (await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce)).add(111); + await expect(nonceHolder.connect(systemAccount).setValueUnderNonce(currentNonce, firstValue)) + .to.emit(nonceHolder, "ValueSetUnderNonce") + .withArgs(systemAccount.address, currentNonce, firstValue); + + const secondValue = (await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce.add(2))).add(333); + await expect(nonceHolder.connect(systemAccount).setValueUnderNonce(currentNonce.add(2), 
secondValue)) + .to.emit(nonceHolder, "ValueSetUnderNonce") + .withArgs(systemAccount.address, currentNonce.add(2), secondValue); + + const thirdValue = (await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce.add(1))).add(222); + await expect(nonceHolder.connect(systemAccount).setValueUnderNonce(currentNonce.add(1), thirdValue)) + .to.emit(nonceHolder, "ValueSetUnderNonce") + .withArgs(systemAccount.address, currentNonce.add(1), thirdValue); + + const storedValue = await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce); + expect(storedValue).to.equal(firstValue); + const storedValueNext = await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce.add(1)); + expect(storedValueNext).to.equal(thirdValue); + const storedAfterNext = await nonceHolder.connect(systemAccount).getValueUnderNonce(currentNonce.add(2)); + expect(storedAfterNext).to.equal(secondValue); + }); + }); + + describe("isNonceUsed", () => { + it("used nonce because it too small", async () => { + const isUsed = await nonceHolder.isNonceUsed(systemAccount.address, 1); + expect(isUsed).to.equal(true); + }); + + it("used nonce because set", async () => { + const currentNonce = await nonceHolder.getMinNonce(systemAccount.address); + const checkedNonce = currentNonce.add(1); + await nonceHolder.connect(systemAccount).setValueUnderNonce(checkedNonce, 5); + + const isUsed = await nonceHolder.isNonceUsed(systemAccount.address, checkedNonce); + expect(isUsed).to.equal(true); + }); + + it("not used nonce", async () => { + const currentNonce = await nonceHolder.getMinNonce(systemAccount.address); + const checkedNonce = currentNonce.add(2137 * 2 ** 10); + + const isUsed = await nonceHolder.isNonceUsed(systemAccount.address, checkedNonce); + expect(isUsed).to.be.false; + }); + }); + + describe("validateNonceUsage", () => { + it("used nonce & should not be used", async () => { + await expect(nonceHolder.validateNonceUsage(systemAccount.address, 1, 
false)).to.be.revertedWithCustomError( + nonceHolder, + "NonceAlreadyUsed" + ); + }); + + it("used nonce & should be used", async () => { + await nonceHolder.validateNonceUsage(systemAccount.address, 1, true); + }); + + it("not used nonce & should be used", async () => { + await expect(nonceHolder.validateNonceUsage(systemAccount.address, 2 ** 16, true)).to.be.revertedWithCustomError( + nonceHolder, + "NonceNotUsed" + ); + }); + + it("not used nonce & should not be used", async () => { + await nonceHolder.validateNonceUsage(systemAccount.address, 2 ** 16, false); + }); + }); +}); diff --git a/system-contracts/test/PubdataChunkPublisher.spec.ts b/system-contracts/test/PubdataChunkPublisher.spec.ts index 49dd5b05f..68d4bfa5b 100644 --- a/system-contracts/test/PubdataChunkPublisher.spec.ts +++ b/system-contracts/test/PubdataChunkPublisher.spec.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; import { ethers, network } from "hardhat"; -import type { Wallet } from "zksync-web3"; +import type { Wallet } from "zksync-ethers"; import type { PubdataChunkPublisher } from "../typechain"; import { PubdataChunkPublisherFactory } from "../typechain"; import { TEST_L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, TEST_PUBDATA_CHUNK_PUBLISHER_ADDRESS } from "./shared/constants"; @@ -35,26 +35,22 @@ describe("PubdataChunkPublisher tests", () => { }); }); - describe("chunkAndPublishPubdata", () => { - it("non-L1Messenger failed to call", async () => { - await expect(pubdataChunkPublisher.chunkAndPublishPubdata("0x1337")).to.be.revertedWith("Inappropriate caller"); - }); - + describe("chunkPubdataToBlobs", () => { it("Too Much Pubdata", async () => { const pubdata = genRandHex(blobSizeInBytes * maxNumberBlobs + 1); await expect( - pubdataChunkPublisher.connect(l1MessengerAccount).chunkAndPublishPubdata(pubdata) - ).to.be.revertedWith("pubdata should fit in 6 blobs"); + pubdataChunkPublisher.connect(l1MessengerAccount).chunkPubdataToBlobs(pubdata) + 
).to.be.revertedWithCustomError(pubdataChunkPublisher, "TooMuchPubdata"); }); it("Publish 1 Blob", async () => { const pubdata = genRandHex(blobSizeInBytes); - await pubdataChunkPublisher.connect(l1MessengerAccount).chunkAndPublishPubdata(pubdata); + await pubdataChunkPublisher.connect(l1MessengerAccount).chunkPubdataToBlobs(pubdata); }); it("Publish 2 Blobs", async () => { const pubdata = genRandHex(blobSizeInBytes * maxNumberBlobs); - await pubdataChunkPublisher.connect(l1MessengerAccount).chunkAndPublishPubdata(pubdata); + await pubdataChunkPublisher.connect(l1MessengerAccount).chunkPubdataToBlobs(pubdata); }); }); }); diff --git a/system-contracts/test/SystemContext.spec.ts b/system-contracts/test/SystemContext.spec.ts index dd23acaf2..2117c59da 100644 --- a/system-contracts/test/SystemContext.spec.ts +++ b/system-contracts/test/SystemContext.spec.ts @@ -28,7 +28,10 @@ describe("SystemContext tests", () => { describe("setTxOrigin", async () => { it("should revert not called by bootlader", async () => { const txOriginExpected = "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef"; - await expect(systemContext.setTxOrigin(txOriginExpected)).to.be.rejectedWith("Callable only by the bootloader"); + await expect(systemContext.setTxOrigin(txOriginExpected)).to.be.revertedWithCustomError( + systemContext, + "CallerMustBeBootloader" + ); }); it("should set tx.origin", async () => { @@ -44,7 +47,10 @@ describe("SystemContext tests", () => { describe("setGasPrice", async () => { it("should revert not called by bootlader", async () => { const newGasPrice = 4294967295; - await expect(systemContext.setGasPrice(newGasPrice)).to.be.rejectedWith("Callable only by the bootloader"); + await expect(systemContext.setGasPrice(newGasPrice)).to.be.revertedWithCustomError( + systemContext, + "CallerMustBeBootloader" + ); }); it("should set tx.gasprice", async () => { @@ -92,7 +98,7 @@ describe("SystemContext tests", () => { const batchHash = await 
systemContext.getBatchHash(batchData.batchNumber); await expect( systemContext.setNewBatch(batchHash, batchData.batchTimestamp.add(1), batchData.batchNumber.add(1), 1) - ).to.be.rejectedWith("Callable only by the bootloader"); + ).to.be.revertedWithCustomError(systemContext, "CallerMustBeBootloader"); }); it("should revert timestamp should be incremental", async () => { @@ -159,7 +165,7 @@ describe("SystemContext tests", () => { true, 1 ) - ).to.be.rejectedWith("Callable only by the bootloader"); + ).to.be.revertedWithCustomError(systemContext, "CallerMustBeBootloader"); }); it("should revert The timestamp of the L2 block must be greater than or equal to the timestamp of the current batch", async () => { diff --git a/system-contracts/test/shared/constants.ts b/system-contracts/test/shared/constants.ts index c85f56415..c3f82c989 100644 --- a/system-contracts/test/shared/constants.ts +++ b/system-contracts/test/shared/constants.ts @@ -15,6 +15,7 @@ export const TEST_BOOTLOADER_UTILITIES_ADDRESS = "0x0000000000000000000000000000 export const TEST_COMPRESSOR_CONTRACT_ADDRESS = "0x000000000000000000000000000000000000900e"; export const TEST_COMPLEX_UPGRADER_CONTRACT_ADDRESS = "0x000000000000000000000000000000000000900f"; export const TEST_PUBDATA_CHUNK_PUBLISHER_ADDRESS = "0x0000000000000000000000000000000000009011"; +export const TEST_L2_GENESIS_UPGRADE_CONTRACT_ADDRESS = "0x0000000000000000000000000000000000010001"; // event writer should be on the original address because event logs are filtered by address export const REAL_EVENT_WRITER_CONTRACT_ADDRESS = "0x000000000000000000000000000000000000800d"; @@ -26,6 +27,12 @@ export const REAL_CODE_ORACLE_CONTRACT_ADDRESS = "0x0000000000000000000000000000 export const REAL_MSG_VALUE_SYSTEM_CONTRACT_ADDRESS = "0x0000000000000000000000000000000000008009"; export const REAL_SYSTEM_CONTEXT_ADDRESS = "0x000000000000000000000000000000000000800b"; +export const REAL_BRIDGEHUB_ADDRESS = "0x0000000000000000000000000000000000010002"; 
+export const REAL_L2_ASSET_ROUTER_ADDRESS = "0x0000000000000000000000000000000000010003"; +export const REAL_L2_MESSAGE_ROOT_ADDRESS = "0x0000000000000000000000000000000000010005"; + export const EMPTY_STRING_KECCAK = "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"; export const TWO_IN_256 = BigNumber.from(2).pow(256); export const ONE_BYTES32_HEX = "0x0000000000000000000000000000000000000000000000000000000000000001"; + +export const ADDRESS_ONE = "0x0000000000000000000000000000000000000001"; diff --git a/system-contracts/test/shared/mocks.ts b/system-contracts/test/shared/mocks.ts index 2b8c35654..8e38ba278 100644 --- a/system-contracts/test/shared/mocks.ts +++ b/system-contracts/test/shared/mocks.ts @@ -2,6 +2,7 @@ import { ethers } from "hardhat"; import type { MockContract } from "../../typechain"; import { MockContractFactory } from "../../typechain"; import { + TEST_DEPLOYER_SYSTEM_CONTRACT_ADDRESS, TEST_ACCOUNT_CODE_STORAGE_SYSTEM_CONTRACT_ADDRESS, TEST_BOOTLOADER_FORMAL_ADDRESS, TEST_BASE_TOKEN_SYSTEM_CONTRACT_ADDRESS, @@ -13,6 +14,8 @@ import { TEST_SYSTEM_CONTEXT_CONTRACT_ADDRESS, TEST_COMPRESSOR_CONTRACT_ADDRESS, TEST_PUBDATA_CHUNK_PUBLISHER_ADDRESS, + REAL_BRIDGEHUB_ADDRESS, + REAL_L2_MESSAGE_ROOT_ADDRESS, } from "./constants"; import { deployContractOnAddress, getWallets, loadArtifact } from "./utils"; @@ -26,6 +29,7 @@ type CallResult = { const TEST_SYSTEM_CONTRACTS_MOCKS = { Compressor: TEST_COMPRESSOR_CONTRACT_ADDRESS, SystemContext: TEST_SYSTEM_CONTEXT_CONTRACT_ADDRESS, + ContractDeployer: TEST_DEPLOYER_SYSTEM_CONTRACT_ADDRESS, NonceHolder: TEST_NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, L1Messenger: TEST_L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, KnownCodesStorage: TEST_KNOWN_CODE_STORAGE_CONTRACT_ADDRESS, @@ -35,6 +39,13 @@ const TEST_SYSTEM_CONTRACTS_MOCKS = { MsgValueSimulator: TEST_MSG_VALUE_SYSTEM_CONTRACT_ADDRESS, Bootloader: TEST_BOOTLOADER_FORMAL_ADDRESS, PubdataChunkPublisher: TEST_PUBDATA_CHUNK_PUBLISHER_ADDRESS, + // We use 
`IBridgehub` name, since this is the name of the file in the system-contracts folder. + // The contract itself is present in a different one. + // For bridgehub we mock the real address for simplicity. + // In case of need, it can be ported to use the test address. + IBridgehub: REAL_BRIDGEHUB_ADDRESS, + // For similar reasons we mock the L2 message real root only for simplicity + IMessageRoot: REAL_L2_MESSAGE_ROOT_ADDRESS, }; // Deploys mocks, and cleans previous call results during deployments. diff --git a/tools/Cargo.lock b/tools/Cargo.lock index 90d7c01b3..da25711f1 100644 --- a/tools/Cargo.lock +++ b/tools/Cargo.lock @@ -3,455 +3,6803 @@ version = 3 [[package]] -name = "ansi_term" -version = "0.12.1" +name = "Inflector" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" dependencies = [ - "winapi", + "lazy_static", + "regex", ] [[package]] -name = "atty" -version = "0.2.14" +name = "addchain" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" dependencies = [ - "hermit-abi", - "libc", - "winapi", + "num-bigint 0.3.3", + "num-integer", + "num-traits", ] [[package]] -name = "bitflags" -version = "1.3.2" +name = "addr2line" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] [[package]] -name = "block-buffer" -version = "0.10.4" +name = "adler" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] -name = "cfg-if" -version = "1.0.0" +name = "aes" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if 1.0.0", + "cipher", + "cpufeatures", +] [[package]] -name = "clap" -version = "2.34.0" +name = "aho-corasick" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ - "ansi_term", - "atty", - "bitflags", - "strsim", - "textwrap", - "unicode-width", - "vec_map", + "memchr", ] [[package]] -name = "cpufeatures" -version = "0.2.9" +name = "android-tzdata" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] -name = "crypto-common" -version = "0.1.6" +name = "ansi_term" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" dependencies = [ - "generic-array", - "typenum", + "winapi", ] [[package]] -name = "digest" -version = "0.10.7" +name = 
"anstream" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" dependencies = [ - "block-buffer", - "crypto-common", + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", ] [[package]] -name = "generic-array" -version = "0.14.7" +name = "anstyle" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" + +[[package]] +name = "anstyle-parse" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" dependencies = [ - "typenum", - "version_check", + "utf8parse", ] [[package]] -name = "handlebars" -version = "4.4.0" +name = "anstyle-query" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c39b3bc2a8f715298032cf5087e58573809374b08160aa7d750582bdb82d2683" +checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" dependencies = [ - "log", - "pest", - "pest_derive", - "serde", - "serde_json", - "thiserror", + "windows-sys 0.52.0", ] [[package]] -name = "heck" -version = "0.3.3" +name = "anstyle-wincon" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ - "unicode-segmentation", + "anstyle", + "windows-sys 0.52.0", ] [[package]] -name = "hermit-abi" -version = "0.1.19" +name = "anyhow" +version = "1.0.83" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" + +[[package]] +name = "arr_macro" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a105bfda48707cf19220129e78fca01e9639433ffaef4163546ed8fb04120a5" dependencies = [ - "libc", + "arr_macro_impl", + "proc-macro-hack", ] [[package]] -name = "itoa" -version = "1.0.9" +name = "arr_macro_impl" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "0609c78bd572f4edc74310dfb63a01f5609d53fa8b4dd7c4d98aef3b3e8d72d1" +dependencies = [ + "proc-macro-hack", + "quote 1.0.33", + "syn 1.0.109", +] [[package]] -name = "lazy_static" -version = "1.4.0" +name = "arrayref" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" [[package]] -name = "libc" -version = "0.2.148" +name = "arrayvec" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" +checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +dependencies = [ + "nodrop", +] [[package]] -name = "log" -version = "0.4.20" +name = "arrayvec" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] -name = "memchr" -version = "2.6.3" +name = "arrayvec" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] -name = "once_cell" -version = "1.18.0" +name = "ascii-canvas" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] [[package]] -name = "pest" -version = "2.7.3" +name = "async-trait" +version = "0.1.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a4d085fd991ac8d5b05a147b437791b4260b76326baf0fc60cf7c9c27ecd33" +checksum = "531b97fb4cd3dfdce92c35dedbfdc1f0b9d8091c8ca943d6dae340ef5012d514" dependencies = [ - "memchr", - "thiserror", - "ucd-trie", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", ] [[package]] -name = "pest_derive" -version = "2.7.3" +name = "async_io_stream" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bee7be22ce7918f641a33f08e3f43388c7656772244e2bbb2477f44cc9021a" +checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" dependencies = [ - "pest", - "pest_generator", + "futures", + "pharos", + "rustc_version", ] [[package]] -name = "pest_generator" -version = "2.7.3" +name = "atty" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1511785c5e98d79a05e8a6bc34b4ac2168a0e3e92161862030ad84daa223141" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "pest", - "pest_meta", - "proc-macro2", - "quote", - "syn 2.0.33", + "hermit-abi 0.1.19", + "libc", + "winapi", ] [[package]] -name = "pest_meta" -version = "2.7.3" +name = "auto_impl" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b42f0394d3123e33353ca5e1e89092e533d2cc490389f2bd6131c43c634ebc5f" +checksum = "7862e21c893d65a1650125d157eaeec691439379a1cee17ee49031b79236ada4" dependencies = [ - "once_cell", - "pest", - "sha2", + "proc-macro-error", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", ] [[package]] -name = "proc-macro-error" -version = "1.0.4" +name = "auto_impl" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", ] [[package]] -name = "proc-macro-error-attr" -version = "1.0.4" +name = "autocfg" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ - "proc-macro2", - "quote", - "version_check", + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", ] [[package]] -name = "proc-macro2" -version = "1.0.67" +name = "axum-core" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ - 
"unicode-ident", + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", ] [[package]] -name = "quote" -version = "1.0.33" +name = "backtrace" +version = "0.3.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ - "proc-macro2", + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", ] [[package]] -name = "ryu" -version = "1.0.15" +name = "base16ct" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" [[package]] -name = "serde" -version = "1.0.188" +name = "base16ct" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" -dependencies = [ - "serde_derive", -] +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" [[package]] -name = "serde_derive" -version = "1.0.188" +name = "base58" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.33", -] +checksum = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" [[package]] -name = "serde_json" -version = "1.0.107" +name = "base58check" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +checksum = "2ee2fe4c9a0c84515f136aaae2466744a721af6d63339c18689d9e995d74d99b" dependencies = [ - "itoa", - "ryu", - 
"serde", + "base58", + "sha2 0.8.2", ] [[package]] -name = "sha2" -version = "0.10.7" +name = "base64" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" [[package]] -name = "strsim" -version = "0.8.0" +name = "base64" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] -name = "structopt" -version = "0.3.26" +name = "base64" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" -dependencies = [ - "clap", - "lazy_static", - "structopt-derive", -] +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] -name = "structopt-derive" -version = "0.4.18" +name = "base64" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" -dependencies = [ - "heck", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] -name = "syn" -version = "1.0.109" +name = "base64ct" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] -name = "syn" -version = "2.0.33" +name = "bech32" +version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9caece70c63bfba29ec2fed841a09851b14a235c60010fa4de58089b6c025668" +checksum = "2dabbe35f96fb9507f7330793dc490461b2962659ac5d427181e451a623751d1" + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" + +[[package]] +name = "bellman_ce" +version = "0.3.2" +source = "git+https://github.com/matter-labs/bellman?branch=snark-wrapper#e01e5fa08a97a113e76ec8a69d06fe6cc2c82d17" dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", + "arrayvec 0.7.4", + "bit-vec", + "blake2s_const", + "blake2s_simd", + "byteorder", + "cfg-if 1.0.0", + "crossbeam 0.7.3", + "futures", + "hex", + "lazy_static", + "num_cpus", + "pairing_ce 0.28.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", ] [[package]] -name = "textwrap" -version = "0.11.0" +name = "bigdecimal" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" dependencies = [ - "unicode-width", + "num-bigint 0.4.5", + "num-integer", + "num-traits", ] [[package]] -name = "thiserror" -version = "1.0.48" +name = "bincode" +version = "1.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" dependencies = [ - "thiserror-impl", + "serde", ] [[package]] -name = "thiserror-impl" -version = "1.0.48" +name = "bit-set" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35" +checksum = 
"0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.33", + "bit-vec", ] [[package]] -name = "typenum" -version = "1.16.0" +name = "bit-vec" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +dependencies = [ + "serde", +] [[package]] -name = "ucd-trie" -version = "0.1.6" +name = "bitflags" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] -name = "unicode-ident" -version = "1.0.12" +name = "bitflags" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" [[package]] -name = "unicode-segmentation" -version = "1.10.1" +name = "bitvec" +version = "0.17.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" +dependencies = [ + "either", + "radium 0.3.0", +] [[package]] -name = "unicode-width" -version = "0.1.10" +name = "bitvec" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium 0.7.0", + "tap", + "wyz", +] [[package]] -name = "vec_map" -version = "0.8.2" +name = "blake2" +version = "0.9.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" +checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" +dependencies = [ + "crypto-mac", + "digest 0.9.0", + "opaque-debug 0.3.1", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "git+https://github.com/RustCrypto/hashes.git?rev=1f727ce37ff40fa0cce84eb8543a45bdd3ca4a4e#1f727ce37ff40fa0cce84eb8543a45bdd3ca4a4e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "blake2-rfc_bellman_edition" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc60350286c7c3db13b98e91dbe5c8b6830a6821bc20af5b0c310ce94d74915" +dependencies = [ + "arrayvec 0.4.12", + "byteorder", + "constant_time_eq", +] + +[[package]] +name = "blake2s_const" +version = "0.6.0" +source = "git+https://github.com/matter-labs/bellman?branch=snark-wrapper#e01e5fa08a97a113e76ec8a69d06fe6cc2c82d17" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding 0.1.5", + "byte-tools", + "byteorder", + "generic-array 0.12.4", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "block-padding 0.2.1", + "generic-array 0.14.7", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array 0.14.7", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", +] + +[[package]] +name = "block-padding" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" + +[[package]] +name = "boojum" +version = "0.2.0" +source = "git+https://github.com/matter-labs/era-boojum.git?branch=main#19988079852ea22576da6b09e39365e6cdc1368f" +dependencies = [ + "arrayvec 0.7.4", + "bincode", + "blake2 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)", + "const_format", + "convert_case 0.6.0", + "crossbeam 0.8.4", + "crypto-bigint 0.5.5", + "cs_derive", + "derivative", + "ethereum-types", + "firestorm", + "itertools 0.10.5", + "lazy_static", + "num-modular", + "num_cpus", + "packed_simd", + "pairing_ce 0.28.5 (git+https://github.com/matter-labs/pairing.git)", + "rand 0.8.5", + "rayon", + "serde", + "sha2 0.10.8", + "sha3 0.10.6", + "smallvec", + "unroll", +] + +[[package]] +name = "bs58" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + 
+[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" +dependencies = [ + "serde", +] + +[[package]] +name = "bzip2" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" +dependencies = [ + "bzip2-sys", + "libc", +] + +[[package]] +name = "bzip2-sys" +version = "0.1.11+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "camino" +version = "1.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "694c8807f2ae16faecc43dc17d74b3eb042482789fd0eb64b39a2e04e087053f" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" 
+dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "cc" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f6e324229dc011159fcc089755d1e2e216a90d43a7dea6853ca740b84f35e7" +dependencies = [ + "jobserver", + "libc", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-targets 0.52.5", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "circuit_definitions" +version = "0.1.0" +source = "git+https://github.com/matter-labs/era-zkevm_test_harness.git?branch=v1.4.0#39665dffd576cff5007c80dd0e1b5334e230bd3b" +dependencies = [ + "circuit_encodings 0.1.40", + "crossbeam 0.8.4", + "derivative", + "seq-macro", + "serde", + "snark_wrapper", + "zk_evm 1.4.0", + "zkevm_circuits 1.4.0", +] + +[[package]] +name = "circuit_definitions" +version = "1.5.0" +source = "git+https://github.com/matter-labs/era-zkevm_test_harness.git?branch=v1.5.0#394e1c7d1aec06d2f3abd63bdc2ddf0efef5ac49" +dependencies = [ + "circuit_encodings 0.1.50", + "crossbeam 0.8.4", + "derivative", + "seq-macro", + 
"serde", + "snark_wrapper", +] + +[[package]] +name = "circuit_encodings" +version = "0.1.40" +source = "git+https://github.com/matter-labs/era-zkevm_test_harness.git?branch=v1.4.0#39665dffd576cff5007c80dd0e1b5334e230bd3b" +dependencies = [ + "derivative", + "serde", + "zk_evm 1.4.0", + "zkevm_circuits 1.4.0", +] + +[[package]] +name = "circuit_encodings" +version = "0.1.50" +source = "git+https://github.com/matter-labs/era-zkevm_test_harness.git?branch=v1.5.0#394e1c7d1aec06d2f3abd63bdc2ddf0efef5ac49" +dependencies = [ + "derivative", + "serde", + "zk_evm 1.5.0", + "zkevm_circuits 1.5.0", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags 1.3.2", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "clap" +version = "4.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim 0.11.1", +] + +[[package]] +name = "clap_derive" +version = "4.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" +dependencies = [ + "heck 0.5.0", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "clap_lex" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" + +[[package]] +name = "codegen" +version = 
"0.1.0" +source = "git+https://github.com/matter-labs/solidity_plonk_verifier.git?branch=snark_wrapper#5fb698f5118990bf53648bfd7027363bc4b03ff2" +dependencies = [ + "ethereum-types", + "franklin-crypto", + "handlebars", + "hex", + "paste", + "rescue_poseidon", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "coins-bip32" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634c509653de24b439672164bbf56f5f582a2ab0e313d3b0f6af0b7345cf2560" +dependencies = [ + "bincode", + "bs58", + "coins-core", + "digest 0.10.7", + "getrandom", + "hmac", + "k256 0.11.6", + "lazy_static", + "serde", + "sha2 0.10.8", + "thiserror", +] + +[[package]] +name = "coins-bip39" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" +dependencies = [ + "bitvec 0.17.4", + "coins-bip32", + "getrandom", + "hex", + "hmac", + "pbkdf2", + "rand 0.8.5", + "sha2 0.10.8", + "thiserror", +] + +[[package]] +name = "coins-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94090a6663f224feae66ab01e41a2555a8296ee07b5f20dab8888bdefc9f617" +dependencies = [ + "base58check", + "base64 0.12.3", + "bech32", + "blake2 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)", + "digest 0.10.7", + "generic-array 0.14.7", + "hex", + "ripemd", + "serde", + "serde_derive", + "sha2 0.10.8", + "sha3 0.10.8", + "thiserror", +] + +[[package]] +name = "colorchoice" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" + +[[package]] +name = "colored" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +dependencies = [ + "lazy_static", + "windows-sys 0.48.0", +] + 
+[[package]] +name = "compile-fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bed69047ed42e52c7e38d6421eeb8ceefb4f2a2b52eed59137f7bad7908f6800" + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "const_format" +version = "0.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "unicode-xid 0.2.4", +] + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "crossbeam" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-channel 0.4.4", + "crossbeam-deque 0.7.4", + "crossbeam-epoch 0.8.2", + "crossbeam-queue 0.2.3", + "crossbeam-utils 0.7.2", +] + +[[package]] +name = "crossbeam" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" +dependencies = [ + "crossbeam-channel 0.5.12", + "crossbeam-deque 0.8.5", + "crossbeam-epoch 0.9.18", + "crossbeam-queue 0.3.11", + "crossbeam-utils 0.8.19", +] + +[[package]] +name = "crossbeam-channel" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" +dependencies = [ + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +dependencies = [ + "crossbeam-utils 0.8.19", +] + +[[package]] +name = "crossbeam-deque" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" +dependencies = [ + "crossbeam-epoch 0.8.2", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch 0.9.18", + "crossbeam-utils 0.8.19", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" +dependencies = [ + "autocfg", + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "lazy_static", + "maybe-uninit", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils 0.8.19", +] + +[[package]] +name = "crossbeam-queue" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +dependencies = [ + "crossbeam-utils 0.8.19", +] + +[[package]] +name = "crossbeam-utils" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" +dependencies = [ + "autocfg", + "cfg-if 0.1.10", + "lazy_static", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.19" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto" +version = "0.1.0" +source = "git+https://github.com/matter-labs/era-boojum-validator-cli?branch=main#1661eef7b235fc10e92208fb738c9e261f58c653" +dependencies = [ + "anyhow", + "bincode", + "circuit_definitions 0.1.0", + "clap 4.5.4", + "codegen", + "colored", + "ethers", + "hex", + "once_cell", + "primitive-types", + "reqwest", + "serde", + "serde_json", + "sha3 0.9.1", + "tokio", + "zksync_types", +] + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array 0.14.7", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array 0.14.7", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array 0.14.7", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array 0.14.7", + "subtle", +] + +[[package]] +name = "cs_derive" +version = "0.1.0" +source = 
"git+https://github.com/matter-labs/era-boojum.git?branch=main#19988079852ea22576da6b09e39365e6cdc1368f" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid 1.8.0", +] + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2 1.0.82", + "quote 1.0.33", + "rustc_version", 
+ "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "1.0.0-beta.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7abbfc297053be59290e3152f8cbcd52c8642e0728b69ee187d991d4c1af08d" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "1.0.0-beta.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bba3e9872d7c58ce7ef0fcf1844fcc3e23ef2a58377b50df35dd98e42a5726e" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", + "unicode-xid 0.2.4", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array 0.12.4", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array 0.14.7", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if 1.0.0", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dtoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" + +[[package]] +name = "dunce" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der 0.6.1", + "elliptic-curve 0.12.3", + "rfc6979 0.3.1", + "signature 1.6.4", +] + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der 0.7.9", + "digest 0.10.7", + "elliptic-curve 0.13.8", + "rfc6979 0.4.0", + "signature 2.2.0", + "spki 0.7.3", +] + +[[package]] +name = "either" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct 0.1.1", + "crypto-bigint 0.4.9", + "der 0.6.1", + "digest 0.10.7", + "ff 0.12.1", + "generic-array 0.14.7", + "group 0.12.1", + "pkcs8 0.9.0", + "rand_core 0.6.4", + "sec1 0.3.0", + "subtle", + "zeroize", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" 
+dependencies = [ + "base16ct 0.2.0", + "crypto-bigint 0.5.5", + "digest 0.10.7", + "ff 0.13.0", + "generic-array 0.14.7", + "group 0.13.0", + "pem-rfc7468", + "pkcs8 0.10.2", + "rand_core 0.6.4", + "sec1 0.7.3", + "subtle", + "zeroize", +] + +[[package]] +name = "elsa" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98e71ae4df57d214182a2e5cb90230c0192c6ddfcaa05c36453d46a54713e10" +dependencies = [ + "stable_deref_trait", +] + +[[package]] +name = "ena" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +dependencies = [ + "log", +] + +[[package]] +name = "encoding_rs" +version = "0.8.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "envy" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f47e0157f2cb54f5ae1bd371b30a2ae4311e1c028f575cd4e81de7353215965" +dependencies = [ + "serde", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "eth-keystore" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" +dependencies = [ + "aes", + "ctr", + "digest 0.10.7", + "hex", + "hmac", + "pbkdf2", + "rand 0.8.5", + "scrypt", + "serde", + "serde_json", + "sha2 0.10.8", + "sha3 0.10.8", 
+ "thiserror", + "uuid 0.8.2", +] + +[[package]] +name = "ethabi" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" +dependencies = [ + "ethereum-types", + "hex", + "once_cell", + "regex", + "serde", + "serde_json", + "sha3 0.10.8", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" +dependencies = [ + "crunchy", + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "scale-info", + "tiny-keccak 2.0.2", +] + +[[package]] +name = "ethereum-types" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" +dependencies = [ + "ethbloom", + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "primitive-types", + "scale-info", + "uint", +] + +[[package]] +name = "ethers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11f26f9d8d80da18ca72aca51804c65eb2153093af3bec74fd5ce32aa0c1f665" +dependencies = [ + "ethers-addressbook", + "ethers-contract", + "ethers-core", + "ethers-etherscan", + "ethers-middleware", + "ethers-providers", + "ethers-signers", + "ethers-solc", +] + +[[package]] +name = "ethers-addressbook" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4be54dd2260945d784e06ccdeb5ad573e8f1541838cee13a1ab885485eaa0b" +dependencies = [ + "ethers-core", + "once_cell", + "serde", + "serde_json", +] + +[[package]] +name = "ethers-contract" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9c3c3e119a89f0a9a1e539e7faecea815f74ddcf7c90d0b00d1f524db2fdc9c" +dependencies = [ + "ethers-contract-abigen", + 
"ethers-contract-derive", + "ethers-core", + "ethers-providers", + "futures-util", + "hex", + "once_cell", + "pin-project", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "ethers-contract-abigen" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d4e5ad46aede34901f71afdb7bb555710ed9613d88d644245c657dc371aa228" +dependencies = [ + "Inflector", + "cfg-if 1.0.0", + "dunce", + "ethers-core", + "eyre", + "getrandom", + "hex", + "proc-macro2 1.0.82", + "quote 1.0.33", + "regex", + "reqwest", + "serde", + "serde_json", + "syn 1.0.109", + "toml", + "url", + "walkdir", +] + +[[package]] +name = "ethers-contract-derive" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f192e8e4cf2b038318aae01e94e7644e0659a76219e94bcd3203df744341d61f" +dependencies = [ + "ethers-contract-abigen", + "ethers-core", + "hex", + "proc-macro2 1.0.82", + "quote 1.0.33", + "serde_json", + "syn 1.0.109", +] + +[[package]] +name = "ethers-core" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" +dependencies = [ + "arrayvec 0.7.4", + "bytes", + "cargo_metadata", + "chrono", + "convert_case 0.6.0", + "elliptic-curve 0.12.3", + "ethabi", + "generic-array 0.14.7", + "hex", + "k256 0.11.6", + "once_cell", + "open-fastrlp", + "proc-macro2 1.0.82", + "rand 0.8.5", + "rlp", + "rlp-derive", + "serde", + "serde_json", + "strum", + "syn 1.0.109", + "thiserror", + "tiny-keccak 2.0.2", + "unicode-xid 0.2.4", +] + +[[package]] +name = "ethers-etherscan" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9713f525348e5dde025d09b0a4217429f8074e8ff22c886263cc191e87d8216" +dependencies = [ + "ethers-core", + "getrandom", + "reqwest", + "semver", + "serde", + "serde-aux", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = 
"ethers-middleware" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e71df7391b0a9a51208ffb5c7f2d068900e99d6b3128d3a4849d138f194778b7" +dependencies = [ + "async-trait", + "auto_impl 0.5.0", + "ethers-contract", + "ethers-core", + "ethers-etherscan", + "ethers-providers", + "ethers-signers", + "futures-locks", + "futures-util", + "instant", + "reqwest", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "url", +] + +[[package]] +name = "ethers-providers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a9e0597aa6b2fdc810ff58bc95e4eeaa2c219b3e615ed025106ecb027407d8" +dependencies = [ + "async-trait", + "auto_impl 1.2.0", + "base64 0.13.1", + "ethers-core", + "futures-channel", + "futures-core", + "futures-timer", + "futures-util", + "getrandom", + "hashers", + "hex", + "http", + "once_cell", + "parking_lot 0.11.2", + "pin-project", + "reqwest", + "serde", + "serde_json", + "thiserror", + "tokio", + "tokio-tungstenite", + "tracing", + "tracing-futures", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-timer", + "web-sys", + "ws_stream_wasm", +] + +[[package]] +name = "ethers-signers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f41ced186867f64773db2e55ffdd92959e094072a1d09a5e5e831d443204f98" +dependencies = [ + "async-trait", + "coins-bip32", + "coins-bip39", + "elliptic-curve 0.12.3", + "eth-keystore", + "ethers-core", + "hex", + "rand 0.8.5", + "sha2 0.10.8", + "thiserror", +] + +[[package]] +name = "ethers-solc" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbe9c0a6d296c57191e5f8a613a3b5e816812c28f4a28d6178a17c21db903d77" +dependencies = [ + "cfg-if 1.0.0", + "dunce", + "ethers-core", + "getrandom", + "glob", + "hex", + "home", + "md-5", + "num_cpus", + "once_cell", + "path-slash", + "rayon", + "regex", + 
"semver", + "serde", + "serde_json", + "solang-parser", + "svm-rs", + "thiserror", + "tiny-keccak 2.0.2", + "tokio", + "tracing", + "walkdir", + "yansi", +] + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "fastrand" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "ff_ce" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" +dependencies = [ + "byteorder", + "ff_derive_ce", + "hex", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "ff_derive_ce" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b96fbccd88dbb1fac4ee4a07c2fcc4ca719a74ffbd9d2b9d41d8c8eb073d8b20" +dependencies = [ + "num-bigint 0.4.5", + "num-integer", + "num-traits", + "proc-macro2 1.0.82", + "quote 1.0.33", + "serde", + "syn 1.0.109", +] + +[[package]] +name = "findshlibs" +version = "0.10.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64" +dependencies = [ + "cc", + "lazy_static", + "libc", + "winapi", +] + +[[package]] +name = "firestorm" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c5f6c2c942da57e2aaaa84b8a521489486f14e75e7fa91dab70aba913975f98" + +[[package]] +name = "fixed-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "flate2" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "franklin-crypto" +version = "0.0.5" +source = "git+https://github.com/matter-labs/franklin-crypto?branch=snark_wrapper#2546c63b91b59bdb0ad342d26f03fb57477550b2" +dependencies = [ + "arr_macro", + "bellman_ce", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "boojum", + "byteorder", + "derivative", + "digest 0.9.0", + "hex", + "indexmap 1.9.3", + "itertools 0.10.5", + "lazy_static", + "num-bigint 0.4.5", + "num-derive", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] 
+name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-locks" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06" +dependencies = [ + "futures-channel", + "futures-task", +] + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +dependencies = [ + "typenum", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff 0.12.1", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "group" +version = "0.13.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff 0.13.0", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 2.2.6", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "handlebars" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39b3bc2a8f715298032cf5087e58573809374b08160aa7d750582bdb82d2683" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashers" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2bca93b15ea5a746f220e56587f71e73c6165eab783df9e26590069953e3c30" +dependencies = [ + "fxhash", +] + +[[package]] +name = "headers" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" +dependencies = [ + "base64 0.21.7", + "bytes", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" 
+dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "impl-codec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + +[[package]] +name = 
"impl-serde" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown 0.14.5", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array 0.14.7", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "is-terminal" +version = "0.4.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "jobserver" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "k256" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" +dependencies = [ + "cfg-if 1.0.0", + "ecdsa 0.14.8", + "elliptic-curve 0.12.3", + "sha2 0.10.8", + "sha3 0.10.8", +] + +[[package]] +name = "k256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" +dependencies = [ + "cfg-if 1.0.0", + "ecdsa 0.16.9", + "elliptic-curve 0.13.8", + "once_cell", + "sha2 0.10.8", + "signature 2.2.0", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lalrpop" +version = "0.19.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" +dependencies = [ + "ascii-canvas", + "bit-set", + "diff", + "ena", + "is-terminal", + "itertools 0.10.5", + "lalrpop-util", + "petgraph", + "regex", + "regex-syntax 0.6.29", + "string_cache", + "term", + "tiny-keccak 2.0.2", + "unicode-xid 0.2.4", +] + +[[package]] +name = "lalrpop-util" +version = "0.19.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" +dependencies = [ + "regex", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.154" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.5.0", + "libc", +] + +[[package]] +name = "linkme" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3ae8aae8e1d516e0a3ceee1219eded7f73741607e4227bf11ef2c3e31580427" +dependencies = [ + "linkme-impl", +] + +[[package]] +name = "linkme-impl" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad083d767be37e709a232ae2a244445ed032bb9c6bf7d9442dd416ba5a7b7264" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "logos" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c000ca4d908ff18ac99b93a062cb8958d331c3220719c52e77cb19cc6ac5d2c1" +dependencies = [ + "logos-derive", +] + +[[package]] +name = "logos-codegen" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68" +dependencies = [ + "beef", + "fnv", + "proc-macro2 1.0.82", + "quote 1.0.33", + 
"regex-syntax 0.6.29", + "syn 2.0.33", +] + +[[package]] +name = "logos-derive" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbfc0d229f1f42d790440136d941afd806bc9e949e2bcb8faa813b0f00d1267e" +dependencies = [ + "logos-codegen", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if 1.0.0", + "digest 0.10.7", +] + +[[package]] +name = "memchr" +version = "2.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" + +[[package]] +name = "memoffset" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" +dependencies = [ + "autocfg", +] + +[[package]] +name = "miette" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e" +dependencies = [ + "miette-derive", + "once_cell", + "thiserror", + "unicode-width", +] + +[[package]] +name = "miette-derive" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nodrop" +version = "0.1.14" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3135b08af27d103b0a51f2ae0f8632117b7b185ccf931445affa8df530576a41" +dependencies = [ + "num-bigint 0.4.5", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" +dependencies = [ + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23c6602fda94a57c990fe0df199a035d83576b496aa29f4e634a8ac6004e68a6" +dependencies = [ + "num-traits", + "serde", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-derive" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eafd0b45c5537c3ba526f79d3e75120036502bebacbb3f3220914067ce39dbf2" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", +] + +[[package]] +name = "num-integer" 
+version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-modular" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64a5fe11d4135c3bcdf3a95b18b194afa9608a5f6ff034f5d857bc9a27fb0119" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg", + "num-bigint 0.4.5", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "num_enum" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" +dependencies = [ + "num_enum_derive 0.6.1", +] + +[[package]] +name = "num_enum" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" +dependencies = [ + 
"num_enum_derive 0.7.2", +] + +[[package]] +name = "num_enum_derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" +dependencies = [ + "proc-macro-crate 3.1.0", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "open-fastrlp" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" +dependencies = [ + "arrayvec 0.7.4", + "auto_impl 1.2.0", + "bytes", + "ethereum-types", + "open-fastrlp-derive", +] + +[[package]] +name = "open-fastrlp-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" +dependencies = [ + "bytes", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "openssl" +version = "0.10.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +dependencies = [ + "bitflags 2.5.0", + "cfg-if 1.0.0", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9591d937bc0e6d2feb6f71a559540ab300ea49955229c347a517a28d27784c54" +dependencies = [ + "opentelemetry_api", + "opentelemetry_sdk", +] + +[[package]] +name = "opentelemetry-http" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry_api", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" +dependencies = [ + "async-trait", + "futures-core", + "http", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry-semantic-conventions", + "opentelemetry_api", + "opentelemetry_sdk", + "prost 0.11.9", + "reqwest", + "thiserror", + "tokio", + "tonic", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1e3f814aa9f8c905d0ee4bde026afd3b2577a97c10e1699912e3e44f0c4cbeb" +dependencies = [ + "opentelemetry_api", + "opentelemetry_sdk", + "prost 0.11.9", + "tonic", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73c9f9340ad135068800e7f1b24e9e09ed9e7143f5bf8518ded3d3ec69789269" +dependencies = [ + "opentelemetry", +] + +[[package]] +name = "opentelemetry_api" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a81f725323db1b1206ca3da8bb19874bbd3f57c3bcd59471bfb04525b265b9b" +dependencies = [ + "futures-channel", + "futures-util", + "indexmap 1.9.3", + "js-sys", + "once_cell", + "pin-project-lite", + "thiserror", + "urlencoding", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa8e705a0612d48139799fcbaba0d4a90f06277153e43dd2bdc16c6f0edd8026" +dependencies = [ + "async-trait", + "crossbeam-channel 0.5.12", + "futures-channel", + "futures-executor", + "futures-util", + "once_cell", + "opentelemetry_api", + "ordered-float 3.9.2", + "percent-encoding", + "rand 0.8.5", + "regex", + "serde_json", + "thiserror", + "tokio", + "tokio-stream", +] + +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" 
+dependencies = [ + "num-traits", +] + +[[package]] +name = "ordered-float" +version = "3.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" +dependencies = [ + "num-traits", +] + +[[package]] +name = "os_info" +version = "3.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" +dependencies = [ + "log", + "serde", + "windows-sys 0.52.0", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa 0.16.9", + "elliptic-curve 0.13.8", + "primeorder", + "sha2 0.10.8", +] + +[[package]] +name = "packed_simd" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f9f08af0c877571712e2e3e686ad79efad9657dbf0f7c3c8ba943ff6c38932d" +dependencies = [ + "cfg-if 1.0.0", + "num-traits", +] + +[[package]] +name = "pairing_ce" +version = "0.28.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db007b21259660d025918e653508f03050bf23fb96a88601f9936329faadc597" +dependencies = [ + "byteorder", + "cfg-if 1.0.0", + "ff_ce", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "pairing_ce" +version = "0.28.5" +source = "git+https://github.com/matter-labs/pairing.git#d24f2c5871089c4cd4f54c0ca266bb9fef6115eb" +dependencies = [ + "byteorder", + "cfg-if 1.0.0", + "ff_ce", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "parity-scale-codec" +version = "3.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"881331e34fa842a2fb61cc2db9643a8fedc615e47cfcc52597d1af0db9a7e8fe" +dependencies = [ + "arrayvec 0.7.4", + "bitvec 1.0.1", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" +dependencies = [ + "proc-macro-crate 2.0.0", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.10", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.5.1", + "smallvec", + "windows-targets 0.52.5", +] + +[[package]] +name = "password-hash" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + 
+[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "path-slash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest 0.10.7", + "hmac", + "password-hash", + "sha2 0.10.8", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a4d085fd991ac8d5b05a147b437791b4260b76326baf0fc60cf7c9c27ecd33" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bee7be22ce7918f641a33f08e3f43388c7656772244e2bbb2477f44cc9021a" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1511785c5e98d79a05e8a6bc34b4ac2168a0e3e92161862030ad84daa223141" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "pest_meta" +version = "2.7.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42f0394d3123e33353ca5e1e89092e533d2cc490389f2bd6131c43c634ebc5f" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.8", +] + +[[package]] +name = "petgraph" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +dependencies = [ + "fixedbitset", + "indexmap 2.2.6", +] + +[[package]] +name = "pharos" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" +dependencies = [ + "futures", + "rustc_version", +] + +[[package]] +name = "phf" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +dependencies = [ + "phf_macros", + "phf_shared", + "proc-macro-hack", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro-hack", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der 0.6.1", + "spki 0.6.0", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der 0.7.9", + "spki 0.7.3", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "prettyplease" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" +dependencies = [ + "proc-macro2 1.0.82", + "syn 2.0.33", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve 0.13.8", +] + +[[package]] +name = "primitive-types" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "scale-info", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.15", +] + +[[package]] +name = "proc-macro-crate" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +dependencies = [ + "toml_edit 0.20.7", +] + +[[package]] +name = "proc-macro-crate" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +dependencies = [ + "toml_edit 0.21.1", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2 
1.0.82", + "quote 1.0.33", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid 0.1.0", +] + +[[package]] +name = "proc-macro2" +version = "1.0.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prometheus-client" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1ca959da22a332509f2a73ae9e5f23f9dcfc31fd3a54d71f159495bd5909baa" +dependencies = [ + "dtoa", + "itoa", + "parking_lot 0.12.2", + "prometheus-client-derive-encode", +] + +[[package]] +name = "prometheus-client-derive-encode" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "prost" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +dependencies = [ + "bytes", + "prost-derive 0.11.9", +] + +[[package]] +name = "prost" +version = "0.12.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0f5d036824e4761737860779c906171497f6d55681139d8312388f8fe398922" +dependencies = [ + "bytes", + "prost-derive 0.12.4", +] + +[[package]] +name = "prost-build" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80b776a1b2dc779f5ee0641f8ade0125bc1298dd41a9a0c16d8bd57b42d222b1" +dependencies = [ + "bytes", + "heck 0.4.1", + "itertools 0.12.1", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost 0.12.4", + "prost-types", + "regex", + "syn 2.0.33", + "tempfile", +] + +[[package]] +name = "prost-derive" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +dependencies = [ + "anyhow", + "itertools 0.10.5", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "prost-derive" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19de2de2a00075bf566bee3bd4db014b11587e84184d3f7a791bc17f1a8e9e48" +dependencies = [ + "anyhow", + "itertools 0.12.1", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "prost-reflect" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "057237efdb71cf4b3f9396302a3d6599a92fa94063ba537b66130980ea9909f3" +dependencies = [ + "base64 0.21.7", + "logos", + "miette", + "once_cell", + "prost 0.12.4", + "prost-types", + "serde", + "serde-value", +] + +[[package]] +name = "prost-types" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3235c33eb02c1f1e212abdbe34c78b264b038fb58ca612664343271e36e55ffe" +dependencies = [ + "prost 0.12.4", +] + +[[package]] +name = "protox" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"00bb76c5f6221de491fe2c8f39b106330bbd9762c6511119c07940e10eb9ff11" +dependencies = [ + "bytes", + "miette", + "prost 0.12.4", + "prost-reflect", + "prost-types", + "protox-parse", + "thiserror", +] + +[[package]] +name = "protox-parse" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4581f441c58863525a3e6bec7b8de98188cf75239a56c725a3e7288450a33f" +dependencies = [ + "logos", + "miette", + "prost-types", + "thiserror", +] + +[[package]] +name = "quick-protobuf" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" +dependencies = [ + "byteorder", +] + +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2 1.0.82", +] + +[[package]] +name = "radium" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque 0.8.5", + "crossbeam-utils 0.8.19", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +dependencies = [ + "bitflags 2.5.0", +] + +[[package]] +name = "redox_users" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.6", + "regex-syntax 0.8.3", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.3", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls 0.21.12", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tokio-rustls 0.24.1", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots 0.25.4", + "winreg", +] + +[[package]] +name = "rescue_poseidon" +version = "0.4.1" +source = "git+https://github.com/matter-labs/rescue-poseidon?branch=poseidon2#126937ef0e7a281f1ff9f512ac41a746a691a342" +dependencies = [ + "addchain", + "arrayvec 0.7.4", + "blake2 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder", + "derivative", + "franklin-crypto", + "lazy_static", + "log", + "num-bigint 0.3.3", + "num-integer", + "num-iter", + "num-traits", + "rand 0.4.6", + "serde", + "sha3 0.9.1", + "smallvec", + "typemap_rev", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint 0.4.9", + "hmac", + "zeroize", +] + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", 
+ "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if 1.0.0", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "ripemd" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes", + "rustc-hex", +] + +[[package]] +name = "rlp-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags 2.5.0", + "errno", 
+ "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092474d1a01ea8278f69e6a358998405fae5b8b963ddaeb2b0b04a128bf1dfb0" + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "salsa20" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" +dependencies = [ + "cipher", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = 
"scale-info" +version = "2.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c453e59a955f81fb62ee5d596b450383d699f152d350e9d23a0db2adb78e4c0" +dependencies = [ + "cfg-if 1.0.0", + "derive_more 0.99.17", + "parity-scale-codec", + "scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "2.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18cf6c6447f813ef19eb450e985bcce6705f9ce7660db221b59093d15c79c4b7" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "scrypt" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" +dependencies = [ + "hmac", + "pbkdf2", + "salsa20", + "sha2 0.10.8", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct 0.1.1", + "der 0.6.1", + "generic-array 0.14.7", + "pkcs8 0.9.0", + "subtle", + "zeroize", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct 0.2.0", + "der 0.7.9", + "generic-array 0.14.7", + "pkcs8 0.10.2", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25996b82292a7a57ed3508f052cfff8640d38d32018784acd714758b43da9c8f" +dependencies = [ + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a129b9e9efbfb223753b9163c4ab3b13cff7fd9c7f010fbac25ab4099fa07e" +dependencies = [ + "cc", +] + +[[package]] +name = "security-framework" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +dependencies = [ + "bitflags 2.5.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +dependencies = [ + "serde", +] + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + +[[package]] +name = "sentry" +version = "0.31.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce4b57f1b521f674df7a1d200be8ff5d74e3712020ee25b553146657b5377d5" +dependencies = [ + "httpdate", + 
"native-tls", + "reqwest", + "sentry-backtrace", + "sentry-contexts", + "sentry-core", + "sentry-debug-images", + "sentry-panic", + "sentry-tracing", + "tokio", + "ureq", +] + +[[package]] +name = "sentry-backtrace" +version = "0.31.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58cc8d4e04a73de8f718dc703943666d03f25d3e9e4d0fb271ca0b8c76dfa00e" +dependencies = [ + "backtrace", + "once_cell", + "regex", + "sentry-core", +] + +[[package]] +name = "sentry-contexts" +version = "0.31.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6436c1bad22cdeb02179ea8ef116ffc217797c028927def303bc593d9320c0d1" +dependencies = [ + "hostname", + "libc", + "os_info", + "rustc_version", + "sentry-core", + "uname", +] + +[[package]] +name = "sentry-core" +version = "0.31.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "901f761681f97db3db836ef9e094acdd8756c40215326c194201941947164ef1" +dependencies = [ + "once_cell", + "rand 0.8.5", + "sentry-types", + "serde", + "serde_json", +] + +[[package]] +name = "sentry-debug-images" +version = "0.31.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afdb263e73d22f39946f6022ed455b7561b22ff5553aca9be3c6a047fa39c328" +dependencies = [ + "findshlibs", + "once_cell", + "sentry-core", +] + +[[package]] +name = "sentry-panic" +version = "0.31.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74fbf1c163f8b6a9d05912e1b272afa27c652e8b47ea60cb9a57ad5e481eea99" +dependencies = [ + "sentry-backtrace", + "sentry-core", +] + +[[package]] +name = "sentry-tracing" +version = "0.31.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82eabcab0a047040befd44599a1da73d3adb228ff53b5ed9795ae04535577704" +dependencies = [ + "sentry-backtrace", + "sentry-core", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "sentry-types" +version = "0.31.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "da956cca56e0101998c8688bc65ce1a96f00673a0e58e663664023d4c7911e82" +dependencies = [ + "debugid", + "hex", + "rand 0.8.5", + "serde", + "serde_json", + "thiserror", + "time", + "url", + "uuid 1.8.0", +] + +[[package]] +name = "seq-macro" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" + +[[package]] +name = "serde" +version = "1.0.193" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-aux" +version = "4.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d2e8bfba469d06512e11e3311d4d051a4a387a5b42d010404fecf3200321c95" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float 2.10.1", + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.193" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "serde_json" +version = "1.0.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + 
"itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15e0ef66bf939a7c890a0bf6d5a733c70202225f9888a89ed5c62298b019129" +dependencies = [ + "indexmap 2.2.6", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug 0.3.1", +] + +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +dependencies = [ + "block-buffer 0.7.3", + "digest 0.8.1", + "fake-simd", + "opaque-debug 0.2.3", +] + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug 0.3.1", +] + +[[package]] +name = "sha2" +version = "0.10.6" +source = "git+https://github.com/RustCrypto/hashes.git?rev=1731ced4a116d61ba9dc6ee6d0f38fb8102e357a#1731ced4a116d61ba9dc6ee6d0f38fb8102e357a" +dependencies 
= [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha3" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "keccak", + "opaque-debug 0.3.1", +] + +[[package]] +name = "sha3" +version = "0.10.6" +source = "git+https://github.com/RustCrypto/hashes.git?rev=7a187e934c1f6c68e4b4e5cf37541b7a0d64d303#7a187e934c1f6c68e4b4e5cf37541b7a0d64d303" +dependencies = [ + "digest 0.10.7", + "keccak", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest 0.10.7", + "keccak", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +dependencies = [ + "serde", +] + +[[package]] +name = "snark_wrapper" +version = "0.1.0" +source = "git+https://github.com/matter-labs/snark-wrapper.git?branch=main#76959cadabeec344b9fa1458728400d60340e496" +dependencies = [ + "derivative", + "rand 0.4.6", + "rescue_poseidon", +] + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "soketto" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +dependencies = [ + "base64 0.13.1", + "bytes", + "futures", + "httparse", + "log", + "rand 0.8.5", + "sha-1 0.9.8", +] + +[[package]] +name = "solang-parser" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac8ac4bfef383f368bd9bb045107a501cd9cd0b64ad1983e1b7e839d6a44ecad" +dependencies = [ + "itertools 0.10.5", + "lalrpop", + "lalrpop-util", + "phf", + "unicode-xid 0.2.4", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der 0.6.1", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der 0.7.9", +] + +[[package]] +name = "splitmut" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85070f382340e8b23a75808e83573ddf65f9ad9143df9573ca37c1ed2ee956a" + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot 0.12.2", + "phf_shared", + "precomputed-hash", +] + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = 
"0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "structopt" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap 2.34.0", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.82", + "quote 1.0.33", + "rustversion", + "syn 1.0.109", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "svm-rs" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a04fc4f5cd35c700153b233f5575ccb3237e0f941fa5049d9e98254d10bf2fe" +dependencies = [ + "fs2", + "hex", + "home", + "once_cell", + "reqwest", + "semver", + "serde", + "serde_json", + "sha2 0.10.8", + "thiserror", + "url", + "zip", +] + +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid 0.1.0", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9caece70c63bfba29ec2fed841a09851b14a235c60010fa4de58089b6c025668" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if 1.0.0", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "term" +version 
= "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e3de26b0965292219b4287ff031fcba86837900fe9cd2b34ea8ad893c0953d2" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "268026685b2be38d7103e9e507c938a1fcb3d7e6eb15e87870b617bf37b6d581" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", 
+] + +[[package]] +name = "tiny-keccak" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d8a021c69bb74a44ccedb824a046447e2c84a01df9e5c20779750acb38e11b2" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.37.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot 0.12.2", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181" +dependencies = [ + "futures-util", + "log", + "rustls 0.20.9", + "tokio", + "tokio-rustls 0.23.4", + "tungstenite", + "webpki", + "webpki-roots 0.22.6", +] + +[[package]] +name = "tokio-util" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.2.6", + "toml_datetime", + "winnow", +] + +[[package]] +name = "toml_edit" +version = "0.20.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +dependencies = [ + "indexmap 2.2.6", + "toml_datetime", + "winnow", +] + +[[package]] +name = "toml_edit" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +dependencies = [ + "indexmap 2.2.6", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tonic" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" +dependencies = [ + "async-trait", + "axum", + "base64 0.21.7", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost 0.11.9", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.21.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75327c6b667828ddc28f5e3f169036cb793c3f588d83bf0f262a7f062ffed3c8" +dependencies = [ + "once_cell", + "opentelemetry", + "opentelemetry_sdk", + "smallvec", + "tracing", + "tracing-core", + "tracing-log 0.1.4", + "tracing-subscriber", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "time", + "tracing", + "tracing-core", + "tracing-log 0.2.0", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" +dependencies = [ + "base64 0.13.1", + "byteorder", + "bytes", + "http", + "httparse", + "log", + "rand 0.8.5", + "rustls 0.20.9", + "sha-1 0.10.1", + "thiserror", + "url", + "utf-8", + "webpki", +] + +[[package]] +name = "typemap_rev" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74b08b0c1257381af16a5c3605254d529d3e7e109f3c62befc5d168968192998" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "uname" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" +dependencies = [ + "libc", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "unroll" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ad948c1cb799b1a70f836077721a92a35ac177d4daddf4c20a633786d4cf618" +dependencies = [ + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "ureq" +version = "2.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d11a831e3c0b56e438a28308e7c810799e3c118417f342d30ecec080105395cd" +dependencies = [ + "base64 0.22.1", + "log", + "native-tls", + "once_cell", + "url", +] + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna 0.5.0", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf-8" +version = "0.7.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", + "serde", +] + +[[package]] +name = "uuid" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" +dependencies = [ + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vise" +version = "0.1.0" +source = "git+https://github.com/matter-labs/vise.git?rev=a5bb80c9ce7168663114ee30e794d6dc32159ee4#a5bb80c9ce7168663114ee30e794d6dc32159ee4" +dependencies = [ + "compile-fmt", + "elsa", + "linkme", + "once_cell", + "prometheus-client", + "vise-macros", +] + +[[package]] +name = 
"vise-macros" +version = "0.1.0" +source = "git+https://github.com/matter-labs/vise.git?rev=a5bb80c9ce7168663114ee30e794d6dc32159ee4#a5bb80c9ce7168663114ee30e794d6dc32159ee4" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "vlog" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "chrono", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "sentry", + "serde_json", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = 
"0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote 1.0.33", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2 1.0.82", + "quote 1.0.33", + "syn 2.0.33", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "wasm-timer" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +dependencies = [ + "futures", + "js-sys", + "parking_lot 0.11.2", + "pin-utils", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web3" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5388522c899d1e1c96a4c307e3797e0f697ba7c77dd8e0e625ecba9dd0342937" +dependencies = [ + "arrayvec 0.7.4", + "base64 0.21.7", + "bytes", + "derive_more 0.99.17", + "ethabi", + "ethereum-types", + "futures", + 
"futures-timer", + "headers", + "hex", + "idna 0.4.0", + "jsonrpc-core", + "log", + "once_cell", + "parking_lot 0.12.2", + "pin-project", + "reqwest", + "rlp", + "secp256k1", + "serde", + "serde_json", + "soketto", + "tiny-keccak 2.0.2", + "tokio", + "tokio-stream", + "tokio-util", + "url", + "web3-async-native-tls", +] + +[[package]] +name = "web3-async-native-tls" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f6d8d1636b2627fe63518d5a9b38a569405d9c9bc665c43c9c341de57227ebb" +dependencies = [ + "native-tls", + "thiserror", + "tokio", + "url", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" 
+dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] [[package]] -name = "winapi" -version = "0.3.9" +name = "windows-targets" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + 
"windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", ] [[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" +name = "windows_aarch64_gnullvm" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" +name = "windows_aarch64_gnullvm" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + 
"cfg-if 1.0.0", + "windows-sys 0.48.0", +] + +[[package]] +name = "ws_stream_wasm" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" +dependencies = [ + "async_io_stream", + "futures", + "js-sys", + "log", + "pharos", + "rustc_version", + "send_wrapper", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" + +[[package]] +name = "zip" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" +dependencies = [ + "aes", + "byteorder", + "bzip2", + "constant_time_eq", + "crc32fast", + "crossbeam-utils 0.8.19", + "flate2", + "hmac", + "pbkdf2", + "sha1", + "time", + "zstd", +] + +[[package]] +name = "zk_evm" +version = "1.3.3" +source = "git+https://github.com/matter-labs/era-zk_evm.git?tag=v1.3.3-rc2#fbee20f5bac7d6ca3e22ae69b2077c510a07de4e" +dependencies = [ + "anyhow", + "lazy_static", + "num", + "serde", + "serde_json", + "static_assertions", + "zk_evm_abstractions 0.1.0", + "zkevm_opcode_defs 1.3.2", +] + +[[package]] +name = "zk_evm" +version = "1.4.0" +source = "git+https://github.com/matter-labs/era-zk_evm.git?branch=v1.4.0#dd76fc5badf2c05278a21b38015a7798fe2fe358" +dependencies = [ + "anyhow", + "lazy_static", + "num", 
+ "serde", + "serde_json", + "static_assertions", + "zk_evm_abstractions 0.1.0", + "zkevm_opcode_defs 1.3.2", +] + +[[package]] +name = "zk_evm" +version = "1.5.0" +source = "git+https://github.com/matter-labs/era-zk_evm.git?branch=v1.5.0#6119ce908ab714f2f99804794e725b97298a6b11" +dependencies = [ + "anyhow", + "lazy_static", + "num", + "serde", + "serde_json", + "static_assertions", + "zk_evm_abstractions 1.5.0", +] + +[[package]] +name = "zk_evm_abstractions" +version = "0.1.0" +source = "git+https://github.com/matter-labs/era-zk_evm_abstractions.git#32dd320953841aa78579d9da08abbc70bcaed175" +dependencies = [ + "anyhow", + "num_enum 0.6.1", + "serde", + "static_assertions", + "zkevm_opcode_defs 1.3.2", +] + +[[package]] +name = "zk_evm_abstractions" +version = "1.5.0" +source = "git+https://github.com/matter-labs/era-zk_evm_abstractions.git?branch=v1.5.0#e464b2cf2b146d883be80e7d690c752bf670ff05" +dependencies = [ + "anyhow", + "num_enum 0.6.1", + "serde", + "static_assertions", + "zkevm_opcode_defs 1.5.0", +] + +[[package]] +name = "zkevm_circuits" +version = "1.4.0" +source = "git+https://github.com/matter-labs/era-zkevm_circuits.git?branch=v1.4.0#fb3e2574b5c890342518fc930c145443f039a105" +dependencies = [ + "arrayvec 0.7.4", + "bincode", + "boojum", + "cs_derive", + "derivative", + "hex", + "itertools 0.10.5", + "rand 0.4.6", + "rand 0.8.5", + "seq-macro", + "serde", + "serde_json", + "smallvec", + "zkevm_opcode_defs 1.3.2", +] + +[[package]] +name = "zkevm_circuits" +version = "1.5.0" +source = "git+https://github.com/matter-labs/era-zkevm_circuits.git?branch=v1.5.0#861f81029bf3a916dae55afa5bd7f82b2eaca98b" +dependencies = [ + "arrayvec 0.7.4", + "boojum", + "cs_derive", + "derivative", + "hex", + "itertools 0.10.5", + "rand 0.4.6", + "rand 0.8.5", + "seq-macro", + "serde", + "smallvec", + "zkevm_opcode_defs 1.5.0", +] + +[[package]] +name = "zkevm_opcode_defs" +version = "1.3.2" +source = 
"git+https://github.com/matter-labs/era-zkevm_opcode_defs.git?branch=v1.3.2#dffacadeccdfdbff4bc124d44c595c4a6eae5013" +dependencies = [ + "bitflags 2.5.0", + "blake2 0.10.6 (git+https://github.com/RustCrypto/hashes.git?rev=1f727ce37ff40fa0cce84eb8543a45bdd3ca4a4e)", + "ethereum-types", + "k256 0.11.6", + "lazy_static", + "sha2 0.10.6", + "sha3 0.10.6", +] + +[[package]] +name = "zkevm_opcode_defs" +version = "1.5.0" +source = "git+https://github.com/matter-labs/era-zkevm_opcode_defs.git?branch=v1.5.0#109d9f734804a8b9dc0531c0b576e2a0f55a40de" +dependencies = [ + "bitflags 2.5.0", + "blake2 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)", + "ethereum-types", + "k256 0.13.2", + "lazy_static", + "p256", + "serde", + "sha2 0.10.8", + "sha3 0.10.8", +] + +[[package]] +name = "zksync_basic_types" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "anyhow", + "chrono", + "num_enum 0.7.2", + "serde", + "serde_json", + "strum", + "url", + "web3", +] + +[[package]] +name = "zksync_concurrency" +version = "0.1.0" +source = "git+https://github.com/matter-labs/era-consensus.git?rev=92ecb2d5d65e3bc4a883dacd18d0640e86576c8c#92ecb2d5d65e3bc4a883dacd18d0640e86576c8c" +dependencies = [ + "anyhow", + "once_cell", + "pin-project", + "rand 0.8.5", + "sha3 0.10.8", + "thiserror", + "time", + "tokio", + "tracing", + "tracing-subscriber", + "vise", +] + +[[package]] +name = "zksync_config" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "anyhow", + "rand 0.8.5", + "serde", + "url", + "zksync_basic_types", + "zksync_consensus_utils", + "zksync_crypto_primitives", +] + +[[package]] +name = "zksync_consensus_utils" +version = "0.1.0" +source = 
"git+https://github.com/matter-labs/era-consensus.git?rev=92ecb2d5d65e3bc4a883dacd18d0640e86576c8c#92ecb2d5d65e3bc4a883dacd18d0640e86576c8c" +dependencies = [ + "rand 0.8.5", + "thiserror", + "zksync_concurrency", +] + +[[package]] +name = "zksync_contracts" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "envy", + "ethabi", + "hex", + "once_cell", + "serde", + "serde_json", + "zksync_utils", +] + +[[package]] +name = "zksync_crypto" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "blake2 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)", + "hex", + "once_cell", + "serde", + "sha2 0.10.8", + "thiserror", + "zksync_basic_types", +] + +[[package]] +name = "zksync_crypto_primitives" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "anyhow", + "hex", + "rand 0.8.5", + "secp256k1", + "serde", + "serde_json", + "thiserror", + "web3", + "zksync_basic_types", + "zksync_utils", +] + +[[package]] +name = "zksync_mini_merkle_tree" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "once_cell", + "zksync_basic_types", + "zksync_crypto", +] + +[[package]] +name = "zksync_protobuf" +version = "0.1.0" +source = "git+https://github.com/matter-labs/era-consensus.git?rev=92ecb2d5d65e3bc4a883dacd18d0640e86576c8c#92ecb2d5d65e3bc4a883dacd18d0640e86576c8c" +dependencies = [ + "anyhow", + "bit-vec", + "once_cell", + "prost 0.12.4", + "prost-reflect", + "quick-protobuf", + "rand 0.8.5", + "serde", + "serde_json", + "serde_yaml", + "zksync_concurrency", + "zksync_consensus_utils", + "zksync_protobuf_build", +] + +[[package]] +name = "zksync_protobuf_build" 
+version = "0.1.0" +source = "git+https://github.com/matter-labs/era-consensus.git?rev=92ecb2d5d65e3bc4a883dacd18d0640e86576c8c#92ecb2d5d65e3bc4a883dacd18d0640e86576c8c" +dependencies = [ + "anyhow", + "heck 0.5.0", + "prettyplease", + "proc-macro2 1.0.82", + "prost-build", + "prost-reflect", + "protox", + "quote 1.0.33", + "syn 2.0.33", +] + +[[package]] +name = "zksync_system_constants" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "once_cell", + "zksync_basic_types", + "zksync_utils", +] + +[[package]] +name = "zksync_types" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "anyhow", + "blake2 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono", + "derive_more 1.0.0-beta.6", + "hex", + "itertools 0.10.5", + "num", + "num_enum 0.7.2", + "once_cell", + "prost 0.12.4", + "rlp", + "secp256k1", + "serde", + "serde_json", + "strum", + "thiserror", + "zksync_basic_types", + "zksync_config", + "zksync_contracts", + "zksync_crypto_primitives", + "zksync_mini_merkle_tree", + "zksync_protobuf", + "zksync_protobuf_build", + "zksync_system_constants", + "zksync_utils", +] + +[[package]] +name = "zksync_utils" +version = "0.1.0" +source = "git+https://github.com/matter-labs/zksync-era.git?branch=main#29a4ffc6b9420590f32a9e1d1585ebffb95eeb6c" +dependencies = [ + "anyhow", + "bigdecimal", + "futures", + "hex", + "itertools 0.10.5", + "num", + "reqwest", + "serde", + "thiserror", + "tokio", + "tracing", + "vlog", + "zk_evm 1.3.3", + "zksync_basic_types", +] [[package]] name = "zksync_verifier_contract_generator" version = "0.1.0" dependencies = [ + "circuit_definitions 1.5.0", + "crypto", "handlebars", + "hex", "lazy_static", "serde_derive", "serde_json", + "sha3 0.10.8", "structopt", ] + +[[package]] +name = "zstd" +version = "0.11.2+zstd.1.5.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "5.0.2+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.10+zstd.1.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/tools/Cargo.toml b/tools/Cargo.toml index 204440623..b32b52bdb 100644 --- a/tools/Cargo.toml +++ b/tools/Cargo.toml @@ -4,10 +4,15 @@ version = "0.1.0" edition = "2021" [dependencies] +zksync_crypto = { git = "https://github.com/matter-labs/era-boojum-validator-cli", branch = "main", package = "crypto" } +circuit_definitions = {git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch="v1.5.0"} + serde_derive = "1.0" serde_json = "1.0" lazy_static = "1.4" structopt = "0.3.26" handlebars = "4.4.0" +sha3 = "0.10.8" +hex = "0.4.3" [workspace] diff --git a/tools/README.md b/tools/README.md index 081ab8d70..a49cf4c73 100644 --- a/tools/README.md +++ b/tools/README.md @@ -7,3 +7,11 @@ To generate the verifier from the scheduler key in 'data' directory, just run: ```shell cargo run --bin zksync_verifier_contract_generator --release -- --input_path data/scheduler_key.json --output_path ../l1-contracts/contracts/state-transition/Verifier.sol ``` + +## L2 mode + +At the time of this writing, `modexp` precompile is not present on zkSync Era. In order to deploy the verifier on top of a ZK Chain, a different version has to be used with custom implementation of modular exponentiation. 
+ +```shell +cargo run --bin zksync_verifier_contract_generator --release -- --input_path data/scheduler_key.json --output_path ../l2-contracts/contracts/verifier/Verifier.sol --l2_mode +``` diff --git a/tools/data/verifier_contract_template.txt b/tools/data/verifier_contract_template.txt index bef14e662..23249c9ab 100644 --- a/tools/data/verifier_contract_template.txt +++ b/tools/data/verifier_contract_template.txt @@ -8,13 +8,14 @@ import {IVerifier} from "./chain-interfaces/IVerifier.sol"; /// @author Matter Labs /// @notice Modified version of the Permutations over Lagrange-bases for Oecumenical Noninteractive arguments of /// Knowledge (PLONK) verifier. -/// Modifications have been made to optimize the proof system for zkSync hyperchain circuits. +/// Modifications have been made to optimize the proof system for ZK chain circuits. +/// @dev Contract was generated from a verification key with a hash of 0x{{vk_hash}} /// @dev It uses a custom memory layout inside the inline assembly block. Each reserved memory cell is declared in the /// constants below. /// @dev For a better understanding of the verifier algorithm please refer to the following papers: /// * Original Plonk Article: https://eprint.iacr.org/2019/953.pdf /// * Original LookUp Article: https://eprint.iacr.org/2020/315.pdf -/// * Plonk for zkSync v1.1: https://github.com/matter-labs/solidity_plonk_verifier/raw/recursive/bellman_vk_codegen_recursive/RecursivePlonkUnrolledForEthereum.pdf +/// * Plonk for ZKsync v1.1: https://github.com/matter-labs/solidity_plonk_verifier/raw/recursive/bellman_vk_codegen_recursive/RecursivePlonkUnrolledForEthereum.pdf /// The notation used in the code is the same as in the papers. 
/* solhint-enable max-line-length */ contract Verifier is IVerifier { @@ -277,8 +278,7 @@ contract Verifier is IVerifier { /// @inheritdoc IVerifier function verify( uint256[] calldata, // _publicInputs - uint256[] calldata, // _proof - uint256[] calldata // _recursiveAggregationInput + uint256[] calldata // _proof ) public view virtual returns (bool) { // No memory was accessed yet, so keys can be loaded into the right place and not corrupt any other memory. _loadVerificationKey(); @@ -308,18 +308,7 @@ contract Verifier is IVerifier { } /// @dev Performs modular exponentiation using the formula (value ^ power) mod R_MOD. - function modexp(value, power) -> res { - mstore(0x00, 0x20) - mstore(0x20, 0x20) - mstore(0x40, 0x20) - mstore(0x60, value) - mstore(0x80, power) - mstore(0xa0, R_MOD) - if iszero(staticcall(gas(), 5, 0, 0xc0, 0x00, 0x20)) { - revertWithMessage(24, "modexp precompile failed") - } - res := mload(0x00) - } + {{modexp_function}} /// @dev Performs a point multiplication operation and stores the result in a given memory destination. function pointMulIntoDest(point, s, dest) { @@ -457,7 +446,17 @@ contract Verifier is IVerifier { // 2. Load the proof (except for the recursive part) offset := calldataload(0x24) let proofLengthInWords := calldataload(add(offset, 0x04)) - isValid := and(eq(proofLengthInWords, 44), isValid) + + // Check the proof length depending on whether the recursive part is present + let expectedProofLength + switch mload(VK_RECURSIVE_FLAG_SLOT) + case 0 { + expectedProofLength := 44 + } + default { + expectedProofLength := 48 + } + isValid := and(eq(proofLengthInWords, expectedProofLength), isValid) // PROOF_STATE_POLYS_0 { @@ -604,21 +603,13 @@ contract Verifier is IVerifier { } // 3. 
Load the recursive part of the proof - offset := calldataload(0x44) - let recursiveProofLengthInWords := calldataload(add(offset, 0x04)) - - switch mload(VK_RECURSIVE_FLAG_SLOT) - case 0 { - // recursive part should be empty - isValid := and(iszero(recursiveProofLengthInWords), isValid) - } - default { + if mload(VK_RECURSIVE_FLAG_SLOT) { // recursive part should be consist of 2 points - isValid := and(eq(recursiveProofLengthInWords, 4), isValid) + // PROOF_RECURSIVE_PART_P1 { - let x := mod(calldataload(add(offset, 0x024)), Q_MOD) - let y := mod(calldataload(add(offset, 0x044)), Q_MOD) + let x := mod(calldataload(add(offset, 0x5a4)), Q_MOD) + let y := mod(calldataload(add(offset, 0x5c4)), Q_MOD) let xx := mulmod(x, x, Q_MOD) isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) mstore(PROOF_RECURSIVE_PART_P1_X_SLOT, x) @@ -626,8 +617,8 @@ contract Verifier is IVerifier { } // PROOF_RECURSIVE_PART_P2 { - let x := mod(calldataload(add(offset, 0x064)), Q_MOD) - let y := mod(calldataload(add(offset, 0x084)), Q_MOD) + let x := mod(calldataload(add(offset, 0x5e4)), Q_MOD) + let y := mod(calldataload(add(offset, 0x604)), Q_MOD) let xx := mulmod(x, x, Q_MOD) isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) mstore(PROOF_RECURSIVE_PART_P2_X_SLOT, x) diff --git a/tools/rust-toolchain b/tools/rust-toolchain deleted file mode 100644 index 0834888f5..000000000 --- a/tools/rust-toolchain +++ /dev/null @@ -1 +0,0 @@ -1.72.0 diff --git a/tools/rust-toolchain.toml b/tools/rust-toolchain.toml new file mode 100644 index 000000000..1388c20ff --- /dev/null +++ b/tools/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "nightly-2024-02-04" diff --git a/tools/src/main.rs b/tools/src/main.rs index 3ac8c3ff1..4da69d921 100644 --- a/tools/src/main.rs +++ b/tools/src/main.rs @@ -1,5 +1,9 @@ +use circuit_definitions::snark_wrapper::franklin_crypto::bellman::plonk::better_better_cs::setup::VerificationKey; +use 
circuit_definitions::snark_wrapper::franklin_crypto::bellman::pairing::bn256::Bn256; +use circuit_definitions::circuit_definitions::aux_layer::ZkSyncSnarkWrapperCircuit; use handlebars::Handlebars; use serde_json::json; +use zksync_crypto::calculate_verification_key_hash; use std::collections::HashMap; use std::error::Error; use std::fs; @@ -111,6 +115,10 @@ struct Opt { /// Output path to verifier contract file. #[structopt(short = "o", long = "output_path", default_value = "data/Verifier.sol")] output_path: String, + + /// The Verifier is to be compiled for an L2 network, where modexp precompile is not available. + #[structopt(short = "l2", long = "l2_mode")] + l2_mode: bool, } fn main() -> Result<(), Box> { @@ -122,8 +130,16 @@ fn main() -> Result<(), Box> { let verifier_contract_template = fs::read_to_string("data/verifier_contract_template.txt")?; + let verification_key = fs::read_to_string(&opt.input_path) + .expect(&format!("Unable to read from {}", &opt.input_path)); + + let verification_key: VerificationKey = + serde_json::from_str(&verification_key).unwrap(); + + let vk_hash = hex::encode(calculate_verification_key_hash(verification_key).to_fixed_bytes()); + let verifier_contract_template = - insert_residue_elements_and_commitments(&verifier_contract_template, &vk)?; + insert_residue_elements_and_commitments(&verifier_contract_template, &vk, &vk_hash, opt.l2_mode)?; let mut file = File::create(opt.output_path)?; @@ -134,6 +150,8 @@ fn main() -> Result<(), Box> { fn insert_residue_elements_and_commitments( template: &str, vk: &HashMap, + vk_hash: &str, + l2_mode: bool, ) -> Result> { let reg = Handlebars::new(); let residue_g2_elements = generate_residue_g2_elements(vk); @@ -142,10 +160,16 @@ fn insert_residue_elements_and_commitments( let verifier_contract_template = template.replace("{{residue_g2_elements}}", &residue_g2_elements); + let modexp_function = get_modexp_function(l2_mode); + let verifier_contract_template = 
verifier_contract_template.replace("{{modexp_function}}", &modexp_function); + + Ok(reg.render_template( &verifier_contract_template, &json!({"residue_g2_elements": residue_g2_elements, - "commitments": commitments}), + "commitments": commitments, + "vk_hash": vk_hash, + "modexp_function": modexp_function}), )?) } @@ -320,3 +344,37 @@ fn generate_residue_g2_elements(vk: &HashMap) -> String { residue_g2_elements } + + +fn get_modexp_function(l2_mode: bool) -> String { + if l2_mode { + r#"function modexp(value, power) -> res { + res := 1 + for { + + } gt(power, 0) { + + } { + if mod(power, 2) { + res := mulmod(res, value, R_MOD) + } + value := mulmod(value, value, R_MOD) + power := shr(1, power) + } + }"#.to_string() + } else { + r#"function modexp(value, power) -> res { + mstore(0x00, 0x20) + mstore(0x20, 0x20) + mstore(0x40, 0x20) + mstore(0x60, value) + mstore(0x80, power) + mstore(0xa0, R_MOD) + if iszero(staticcall(gas(), 5, 0, 0xc0, 0x00, 0x20)) { + revertWithMessage(24, "modexp precompile failed") + } + res := mload(0x00) + }"#.to_string() + } +} +