diff --git a/.editorconfig b/.editorconfig index c05a3529f..24511d58c 100644 --- a/.editorconfig +++ b/.editorconfig @@ -6,7 +6,7 @@ end_of_line = lf insert_final_newline = true charset = utf-8 indent_style = space -indent_size = 4 - -[*.{js,yml,json,cjs,ts}] indent_size = 2 + +[*.sol] +indent_size = 4 diff --git a/.env.example b/.env.example new file mode 100644 index 000000000..240caf6bb --- /dev/null +++ b/.env.example @@ -0,0 +1,46 @@ +LOCAL_RPC_URL=http://localhost:8555 +LOCAL_LOCATOR_ADDRESS= +LOCAL_AGENT_ADDRESS= +LOCAL_VOTING_ADDRESS= +LOCAL_EASY_TRACK_EXECUTOR_ADDRESS= +LOCAL_ACCOUNTING_ORACLE_ADDRESS= +LOCAL_ACL_ADDRESS= +LOCAL_BURNER_ADDRESS= +LOCAL_DEPOSIT_SECURITY_MODULE_ADDRESS= +LOCAL_EL_REWARDS_VAULT_ADDRESS= +LOCAL_HASH_CONSENSUS_ADDRESS= +LOCAL_KERNEL_ADDRESS= +LOCAL_LEGACY_ORACLE_ADDRESS= +LOCAL_LIDO_ADDRESS= +LOCAL_NOR_ADDRESS= +LOCAL_ORACLE_DAEMON_CONFIG_ADDRESS= +LOCAL_ORACLE_REPORT_SANITY_CHECKER_ADDRESS= +LOCAL_SDVT_ADDRESS= +LOCAL_STAKING_ROUTER_ADDRESS= +LOCAL_VALIDATORS_EXIT_BUS_ORACLE_ADDRESS= +LOCAL_WITHDRAWAL_QUEUE_ADDRESS= +LOCAL_WITHDRAWAL_VAULT_ADDRESS= + +# https://docs.lido.fi/deployed-contracts +MAINNET_RPC_URL=http://localhost:8545 +MAINNET_LOCATOR_ADDRESS=0xC1d0b3DE6792Bf6b4b37EccdcC24e45978Cfd2Eb +MAINNET_AGENT_ADDRESS=0x3e40D73EB977Dc6a537aF587D48316feE66E9C8c +MAINNET_VOTING_ADDRESS=0x2e59A20f205bB85a89C53f1936454680651E618e +MAINNET_EASY_TRACK_EXECUTOR_ADDRESS=0xFE5986E06210aC1eCC1aDCafc0cc7f8D63B3F977 +MAINNET_ACCOUNTING_ORACLE_ADDRESS= +MAINNET_ACL_ADDRESS= +MAINNET_BURNER_ADDRESS= +MAINNET_DEPOSIT_SECURITY_MODULE_ADDRESS= +MAINNET_EL_REWARDS_VAULT_ADDRESS= +MAINNET_HASH_CONSENSUS_ADDRESS= +MAINNET_KERNEL_ADDRESS= +MAINNET_LEGACY_ORACLE_ADDRESS= +MAINNET_LIDO_ADDRESS= +MAINNET_NOR_ADDRESS= +MAINNET_ORACLE_DAEMON_CONFIG_ADDRESS= +MAINNET_ORACLE_REPORT_SANITY_CHECKER_ADDRESS= +MAINNET_SDVT_ADDRESS= +MAINNET_STAKING_ROUTER_ADDRESS= +MAINNET_VALIDATORS_EXIT_BUS_ORACLE_ADDRESS= +MAINNET_WITHDRAWAL_QUEUE_ADDRESS= +MAINNET_WITHDRAWAL_VAULT_ADDRESS= diff --git a/.eslintrc b/.eslintrc index e6356f462..66fbe62bb 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,17 +1,20 @@ { "parser": "@typescript-eslint/parser", "extends": ["plugin:@typescript-eslint/recommended", "prettier"], - "parserOptions": { "ecmaVersion": 2022, "sourceType": "module", "project": ["./tsconfig.json"] }, + "parserOptions": { + "ecmaVersion": 2022, "sourceType": "module", "project": ["./tsconfig.json"] + }, "plugins": ["no-only-tests", "simple-import-sort"], "rules": { "@typescript-eslint/no-explicit-any": ["warn"], "@typescript-eslint/no-unused-vars": ["warn"], "@typescript-eslint/no-floating-promises": ["warn"], + "@typescript-eslint/no-shadow": ["error"], // prevents committing `describe.only` and `it.only` tests "no-only-tests/no-only-tests": "warn", + "no-shadow": "off", "simple-import-sort/imports": [ - "error", - { + "error", { "groups": [ ["^node:"], ["^\\u0000"], @@ -22,17 +25,14 @@ ["^test"], ["^../"], ["^./"], - ["^"], - ], - }, - ], + ["^"] + ] + } + ] }, "overrides": [ { - "files": ["./scripts/{**/,}*.js", "./test/{**/,}*.js"], - "env": { - "mocha": true, - }, - }, - ], + "files": ["./scripts/{**/,}*.js", "./test/{**/,}*.js"], "env": {"mocha": true} + } + ] } diff --git a/.github/workflows/analyse.yml b/.github/workflows/analyse.yml index 4b1467680..eaa1345e1 100644 --- a/.github/workflows/analyse.yml +++ b/.github/workflows/analyse.yml @@ -1,14 +1,14 @@ -name: Code Analysis +name: Analysis on: push: - branches: [ master, develop, repovation ] + branches: [master, develop, 
repovation] pull_request: - branches: [ master, develop, repovation ] + branches: [master, develop, repovation] jobs: slither: - name: Solidity code analysis + name: Slither runs-on: ubuntu-latest permissions: @@ -17,18 +17,9 @@ jobs: steps: - uses: actions/checkout@v4 - with: - persist-credentials: false - - - run: corepack enable - - uses: actions/setup-node@v4 - with: - node-version-file: .nvmrc - cache: "yarn" - - - name: Install dependencies - run: yarn install + - name: Common setup + uses: ./.github/workflows/setup # REVIEW: here and below steps taken from official guide # https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#caching-packages diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 92f802ef4..568130f3b 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -2,31 +2,26 @@ name: Coverage on: push: - branches: [ master, develop, repovation ] + branches: [master, develop, repovation] pull_request: - branches: [ master, develop, repovation ] + branches: [master, develop, repovation] jobs: coverage: - name: Solidity coverage + name: Hardhat runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - with: - persist-credentials: false - - - run: corepack enable - - uses: actions/setup-node@v4 - with: - node-version-file: .nvmrc - cache: "yarn" + - name: Common setup + uses: ./.github/workflows/setup - - name: Install dependencies - run: yarn install + # Remove the integration tests from the test suite, as they require a mainnet fork to run properly + - name: Remove integration tests + run: rm -rf test/integration - - name: Run Solidity coverage + - name: Collect coverage run: yarn test:coverage - name: Produce the coverage report @@ -37,5 +32,5 @@ jobs: diff: true diff-branch: master diff-storage: _core_coverage_reports - coverage-summary-title: "Code Coverage Summary" + coverage-summary-title: "Hardhat Unit Tests Coverage Summary" togglable-report: true diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index f07a8719b..bdc18a69e 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -1,54 +1,46 @@ name: Linters -on: [ push ] +on: [push] jobs: - lint: - name: Solidity and TypeScript linters + solhint: + name: Solhint runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - with: - persist-credentials: false - - run: corepack enable + - name: Common setup + uses: ./.github/workflows/setup - - uses: actions/setup-node@v4 - with: - node-version-file: .nvmrc - cache: "yarn" + - name: Run solhint + run: yarn lint:sol - - name: Install dependencies - run: yarn install + eslint: + name: ESLint + runs-on: ubuntu-latest - - name: Run Solidity linters - run: yarn lint:sol + steps: + - uses: actions/checkout@v4 + + - name: Common setup + uses: ./.github/workflows/setup - - name: Run TS linters + - name: Run eslint run: yarn lint:ts - types: - name: TypeScript types check + typescript: + name: TypeScript runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - with: - persist-credentials: false - - - run: corepack enable - - - uses: actions/setup-node@v4 - with: - node-version-file: .nvmrc - cache: "yarn" - - name: Install dependencies - run: yarn install + - name: Common setup + uses: ./.github/workflows/setup - name: Generate typechain types run: yarn hardhat compile - - name: Run TypeScript types check + - name: Run typescript types check run: yarn typecheck diff --git a/.github/workflows/setup/action.yml b/.github/workflows/setup/action.yml new file 
mode 100644 index 000000000..8c6cdc6e2 --- /dev/null +++ b/.github/workflows/setup/action.yml @@ -0,0 +1,19 @@ +name: Common setup + +on: workflow_call + +runs: + using: "composite" + steps: + - name: Enable corepack + shell: bash + run: corepack enable + + - uses: actions/setup-node@v4 + with: + node-version-file: .nvmrc + cache: yarn + + - name: Install dependencies + shell: bash + run: yarn install diff --git a/.github/workflows/tests-integration.yml b/.github/workflows/tests-integration.yml new file mode 100644 index 000000000..c06e32303 --- /dev/null +++ b/.github/workflows/tests-integration.yml @@ -0,0 +1,31 @@ +name: Integration Tests + +on: [push] + +jobs: + test_hardhat_integration: + name: Hardhat + runs-on: ubuntu-latest + timeout-minutes: 120 + + services: + hardhat-node: + image: feofanov/hardhat-node + ports: + - 8545:8545 + env: + ETH_RPC_URL: "${{ secrets.ETH_RPC_URL }}" + + steps: + - uses: actions/checkout@v4 + + - name: Common setup + uses: ./.github/workflows/setup + + - name: Set env + run: cp .env.example .env + + - name: Run integration tests + run: yarn test:integration:fork + env: + LOG_LEVEL: debug diff --git a/.github/workflows/tests-unit.yml b/.github/workflows/tests-unit.yml new file mode 100644 index 000000000..c4bd3d4d3 --- /dev/null +++ b/.github/workflows/tests-unit.yml @@ -0,0 +1,37 @@ +name: Unit Tests + +on: [push] + +jobs: + test_hardhat_unit: + name: Hardhat + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Common setup + uses: ./.github/workflows/setup + + - name: Run unit tests + run: yarn test + + test_foundry_fuzzing: + name: Foundry (fuzzing & invariant) + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Common setup + uses: ./.github/workflows/setup + + - name: Install foundry + uses: foundry-rs/foundry-toolchain@v1 + # Use a specific version of Foundry in case nightly is broken + # https://github.com/foundry-rs/foundry/releases + # with: + # version: nightly-54d8510c0f2b0f791f4c5ef99866c6af99b7606a + + - name: Run fuzzing and invariant tests + run: forge test -vvv diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml deleted file mode 100644 index 7c50cdbc2..000000000 --- a/.github/workflows/tests.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Tests - -on: [ push ] - -jobs: - test_hardhat: - name: Hardhat Solidity tests - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - run: corepack enable - - - uses: actions/setup-node@v4 - with: - node-version-file: .nvmrc - cache: "yarn" - - - name: Install dependencies - run: yarn install - - - name: Run Hardhat Solidity tests - run: yarn test - - test_foundry: - name: Foundry Solidity tests - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@v1 - # Use a specific version of Foundry in case nightly is broken - # https://github.com/foundry-rs/foundry/releases - # with: - # version: nightly-54d8510c0f2b0f791f4c5ef99866c6af99b7606a - - - run: corepack enable - - - uses: actions/setup-node@v4 - with: - node-version-file: .nvmrc - cache: "yarn" - - - name: Install dependencies - run: yarn install - - - name: Run tests - run: forge test -vvv diff --git a/.prettierrc b/.prettierrc index fc1cb78e5..8618a9629 100644 --- a/.prettierrc +++ b/.prettierrc @@ -2,5 +2,6 @@ "semi": true, "singleQuote": false, "printWidth": 120, - "tabWidth": 2 + "tabWidth": 2, + "quoteProps": "consistent" } 
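The workflows above copy `.env.example` to `.env` before running fork tests, and the Hardhat config later in this diff loads it via `import "dotenv/config"`. As a quick illustration of how those variables might be consumed (this snippet is not part of the diff; the helper name and the exact required set are assumptions), a script or test could fail fast when the `MAINNET_*` values are missing:

```typescript
// Illustrative only: validate fork-test env vars, assuming the `dotenv` package used by this PR is installed.
import "dotenv/config";

const REQUIRED_FORK_ENV = [
  "MAINNET_RPC_URL",
  "MAINNET_LOCATOR_ADDRESS",
  "MAINNET_AGENT_ADDRESS",
  "MAINNET_VOTING_ADDRESS",
  "MAINNET_EASY_TRACK_EXECUTOR_ADDRESS",
] as const;

// Hypothetical helper: throws before any fork test runs if required variables are unset.
export function assertForkEnv(): void {
  const missing = REQUIRED_FORK_ENV.filter((name) => !process.env[name]);
  if (missing.length > 0) {
    throw new Error(`Missing env variables (see .env.example): ${missing.join(", ")}`);
  }
}
```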
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7956eac35..dd89635a7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,8 @@ # Lido Contribution Guide -Welcome to the Lido Contribution Guide! Thank you for your interest in contributing to Lido! Join our community of contributors who are passionate about advancing liquid staking. Whether you're fixing a bug, adding a new feature, or improving the documentation, your contribution is valuable and your effort to make Lido better is appreciated. +Welcome to the Lido Contribution Guide! Thank you for your interest in contributing to Lido! Join our community of +contributors who are passionate about advancing liquid staking. Whether you're fixing a bug, adding a new feature, or +improving the documentation, your contribution is valuable and your effort to make Lido better is appreciated. ## Ways to Contribute @@ -8,21 +10,32 @@ Welcome to the Lido Contribution Guide! Thank you for your interest in contribut Issues are a great way to contribute to the project by reporting bugs or suggesting enhancements. -- **Bug Reports**. If you encounter a bug, please report it using the GitHub issues feature. Check first to ensure the bug hasn't already been reported. If it has, you can contribute further by adding more detail to the existing report. _Note that this only relates to off-chain code (tests, scripts, etc.), for bugs in contracts and protocol vulnerabilities, please refer to [Bug Bounty](/README.md#bug-bounty)_. +- **Bug Reports**. If you encounter a bug, please report it using the GitHub issues feature. Check first to ensure the + bug hasn't already been reported. If it has, you can contribute further by adding more detail to the existing report. + _Note that this only relates to off-chain code (tests, scripts, etc.), for bugs in contracts and protocol + vulnerabilities, please refer to [Bug Bounty](/README.md#bug-bounty)_. -- **Feature Requests**: Have an idea for a new feature or an improvement to an existing one? Submit a feature request through the GitHub issues, detailing your proposed enhancements and how they would benefit the Lido Finance Core. +- **Feature Requests**: Have an idea for a new feature or an improvement to an existing one? Submit a feature request + through the GitHub issues, detailing your proposed enhancements and how they would benefit the Lido Finance Core. ### Improving Documentation -Good documentation is crucial for any project. If you have suggestions for improving the documentation, or if you've noticed an omission or error, making these corrections is a significant contribution. Whether it's a typo, additional examples, or clearer explanations, your help in making the documentation more accessible and understandable is highly appreciated. +Good documentation is crucial for any project. If you have suggestions for improving the documentation, or if you've +noticed an omission or error, making these corrections is a significant contribution. Whether it's a typo, additional +examples, or clearer explanations, your help in making the documentation more accessible and understandable is highly +appreciated. For expansive documentation, visit the [Lido Docs repo](https://github.com/lidofinance/docs). ### Contributing to codebase -Contributing by resolving open issues is a valuable way to help improve the project. Look through the existing issues for something that interests you or matches your expertise. Don't hesitate to ask for more information or clarification if needed before starting. 
If you're interested in improving tooling and CI in this repository, consider opening a feature request issue first to discuss it with the community of contributors. +Contributing by resolving open issues is a valuable way to help improve the project. Look through the existing issues +for something that interests you or matches your expertise. Don't hesitate to ask for more information or clarification +if needed before starting. If you're interested in improving tooling and CI in this repository, consider opening a +feature request issue first to discuss it with the community of contributors. -If you have a bigger idea on how to improve the protocol, consider publishing your proposal to [Lido Forum](https://research.lido.fi/). +If you have a bigger idea on how to improve the protocol, consider publishing your proposal +to [Lido Forum](https://research.lido.fi/). ## Getting started @@ -33,7 +46,8 @@ If you have a bigger idea on how to improve the protocol, consider publishing yo - [Foundry](https://book.getfoundry.sh/) latest available version > [!NOTE] -> On macOS with Homebrew it is recommended to install Node.js using [`n`](https://github.com/tj/n) or [`nvm`](https://github.com/nvm-sh/nvm) version managers. +> On macOS with Homebrew it is recommended to install Node.js using [`n`](https://github.com/tj/n) +> or [`nvm`](https://github.com/nvm-sh/nvm) version managers. > Example setup process using `n` package manager for zsh users: > > ``` @@ -74,27 +88,38 @@ WIP All contributions must follow the established conventions: -1. All Solidity code must be autoformatted using Solhint. Contracts largely follow the [Official Solidity Guide](https://docs.soliditylang.org/en/latest/style-guide.html) with some exceptions. When writing contracts, refer to existing contracts for conventions, naming patterns, formatting, etc. -2. All TypeScript code must be autoformatted using ESLint. When writing tests and scripts, please refer to existing codebase. +1. All Solidity code must be autoformatted using Solhint. Contracts largely follow + the [Official Solidity Guide](https://docs.soliditylang.org/en/latest/style-guide.html) with some exceptions. When + writing contracts, refer to existing contracts for conventions, naming patterns, formatting, etc. +2. All TypeScript code must be autoformatted using ESLint. When writing tests and scripts, please refer to existing + codebase. 3. Commit messages must follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) format. -The repository includes a commit hook that checks your code and commit messages, resolve any issues before submitting a pull request. +The repository includes a commit hook that checks your code and commit messages, resolve any issues before submitting a +pull request. ## Branches ### `master` -The production branch of the protocol and the default branch of the repository. The [deployed protocol contracts](https://docs.lido.fi/deployed-contracts/) must match what is stored in the `/contracts` directory. Pull requests to `master` must originate from `develop` branch and have at least one approving review before merging. +The production branch of the protocol and the default branch of the repository. +The [deployed protocol contracts](https://docs.lido.fi/deployed-contracts/) must match what is stored in +the `/contracts` directory. Pull requests to `master` must originate from `develop` branch and have at least one +approving review before merging. ### `develop` -The development branch. 
All pull requests to `master` must be submitted to `develop` first for peer review. If +appropriate, delete the feature branch after merging to `develop`. ## Repository structure ### Contracts -All production contracts are located in `/contracts` in the root of the project. The subdirectory names indicate the Solidity version of the contracts inside, e.g. the contracts in `/contracts/0.4.24` are all written in Solidity v0.4.24. Common interfaces and libraries shared by contracts with different versions are located in `/contracts/common` subdirectory. +All production contracts are located in `/contracts` in the root of the project. The subdirectory names indicate the +Solidity version of the contracts inside, e.g. the contracts in `/contracts/0.4.24` are all written in Solidity v0.4.24. +Common interfaces and libraries shared by contracts with different versions are located in `/contracts/common` +subdirectory. ### Tests @@ -104,17 +129,56 @@ This repository features a Hardhat-Foundry dual setup: - Foundry's anvil is faster than the Hardhat Network; - Foundry fuzzing capabilities allows for a better edge-case coverage. +#### Tracing + +`hardhat-tracer` is used to trace contract calls and state changes during tests. +Full scale transaction tracing is disabled by default because it can significantly slow down the tests. + +To enable tracing, you need to wrap the code you want to trace with `Tracer.enable()` and `Tracer.disable()` functions and +run the tests with commands that have the `:trace` or `:fulltrace` postfix. + +```typescript +import { Tracer } from 'test/suite'; + +describe('MyContract', () => { + it('should do something', async () => { + Tracer.enable(); + // code to trace + Tracer.disable(); + }); +}); +``` + +And then run the tests with the following commands: + +```bash +yarn test:trace # Run all tests with trace logging (calls only) +yarn test:fulltrace # Run all tests with full trace logging (calls and storage ops) +yarn test:integration:trace # Run all integration tests with trace logging +yarn test:integration:fulltrace # Run all integration tests with full trace logging +``` + +> [!NOTE] +> Tracing is not supported in Foundry tests and integration tests other than Hardhat mainnet fork tests. + #### Hardhat -Hardhat tests are all located in `/tests` in the root of the project. -Each subdirectory name corresponds to the version of the contract being tested, mirroring the `/contracts` directory structure. Integration, regression and other non-unit tests are placed into corresponding subdirectories, -e.g. `/tests/integration/`, `/tests/regression`, etc. +Hardhat tests are all located in `/tests` in the root of the project. +Each subdirectory name corresponds to the version of the contract being tested, mirroring the `/contracts` directory +structure. Integration, regression and other non-unit tests are placed into corresponding subdirectories, +e.g. `/tests/integration/`, `/tests/regression`, etc. + +```bash +yarn test # Run all tests in parallel +yarn test:sequential # Run all tests sequentially +yarn test:trace # Run all tests with trace logging (see Tracing section) +yarn test:watch # Run all tests in watch mode +``` #### Foundry Foundry's Solidity tests are used only for fuzzing library contracts or functions performing complex calculations -or byte juggling.
Solidity tests are located under `/tests` and in the appropriate subdirectories. Naming conventions follow the Foundry's [documentation](https://book.getfoundry.sh/tutorials/best-practices#general-test-guidance): - for tests, postfix `.t.sol` is used (e.g., `MyContract.t.sol`) @@ -124,19 +188,78 @@ follow the Foundry's [documentation](https://book.getfoundry.sh/tutorials/best-p Following the convention of distinguishing Hardhat test files from Foundry-related files is essential to ensure the proper execution of Hardhat tests. +```bash +yarn test:foundry # Run all Foundry tests +``` + +#### Integration tests + +Integration tests are located in `/tests/integration` in the root of the project. +These tests are used to verify the interaction between different contracts and their behavior in a real-world scenario. + +You can run integration tests in multiple ways, but for all of them, you need to have a `.env` file in the root of +the project (you can use `.env.example` as a template). + +##### Hardhat Mainnet Fork + +This is the most common way to run integration tests. It uses the Hardhat mainnet fork to simulate the mainnet +environment. Requires `HARDHAT_FORKING_URL` and `HARDHAT_FORKING_BLOCK_NUMBER` (optional) to be set in the `.env` file +along with `MAINNET_*` env variables (see `.env.example`). + +```bash +yarn test:integration # Run all integration tests +yarn test:integration:trace # Run all integration tests with trace logging (see Tracing section) +``` + +##### Local setup + +This method is used to run integration tests against a local scratch deployment ( +see [scratch-deploy.md](./docs/scratch-deploy.md)). +Requires `LOCAL_*` env variables to be set and a local deployment to be running on port `8555`. + +```bash +yarn test:integration:local +``` + +##### Any fork setup + +This method is used to run integration tests against any fork. Requires `MAINNET_*` env variables to be set in the +`.env` file and a fork to be running on port `8545`. + +```bash +yarn test:integration:fork +``` + +#### Coverage + +The project uses the `solidity-coverage` plugin to generate coverage reports. +Foundry tests are not included in the coverage. + +To generate coverage reports, run the following command: + +```bash +yarn test:coverage +``` + #### Mocks -The `/tests` directory also contains contract mocks and helpers which are placed in the `.../contracts` subdirectory, e.g. `/tests/0.4.24/contracts`. Mocks and helpers **DO NOT** have to be written using the version of Solidity of the contract being tested. For example, it is okay to have a mock contract written in Solidity v0.8.9 in `/tests/0.4.24/contracts`. +The `/tests` directory also contains contract mocks and helpers which are placed in the `.../contracts` subdirectory, +e.g. `/tests/0.4.24/contracts`. Mocks and helpers **DO NOT** have to be written using the version of Solidity of the +contract being tested. For example, it is okay to have a mock contract written in Solidity v0.8.9 +in `/tests/0.4.24/contracts`. ### Library -TypeScript utilities and helpers are located in `/lib` in the root of the project. When adding a new file to this directory, please re-export everything from the `/lib/index.ts` file to keep import statement clean. +TypeScript utilities and helpers are located in `/lib` in the root of the project. When adding a new file to this +directory, please re-export everything from the `/lib/index.ts` file to keep import statements clean.
### Typechain types -All typechain types are placed in `/typechain-types` in the root of the project. DO NOT manually edit in this directory. These types are autogenerated on each compilation. +All typechain types are placed in `/typechain-types` in the root of the project. DO NOT manually edit files in this directory. +These types are autogenerated on each compilation. -There have been issues with IDEs failing to properly index this directory resulting in import errors. If you are experiencing similar issues, the solutions above should resolve them: +There have been issues with IDEs failing to properly index this directory resulting in import errors. If you are +experiencing similar issues, the solutions below should resolve them: - open the `/typechain-types/index.ts` file to force the IDE to index it; - delete the directory and re-compile `yarn hardhat compile --force`. diff --git a/globals.d.ts b/globals.d.ts new file mode 100644 index 000000000..c7d1611a0 --- /dev/null +++ b/globals.d.ts @@ -0,0 +1,64 @@ +declare namespace NodeJS { + export interface ProcessEnv { + /* forking url for hardhat internal node, required for tracing e.g. */ + HARDHAT_FORKING_URL?: string; + + /* logging verbosity */ + LOG_LEVEL?: "all" | "debug" | "info" | "warn" | "error" | "none"; + + /* flags for changing the behavior of the integration tests */ + INTEGRATION_SIMPLE_DVT_MODULE?: "on" | "off"; + + /** + * Network configuration for the protocol discovery. + */ + + /* for local development */ + LOCAL_RPC_URL: string; + LOCAL_LOCATOR_ADDRESS: string; + LOCAL_AGENT_ADDRESS: string; + LOCAL_VOTING_ADDRESS: string; + LOCAL_EASY_TRACK_EXECUTOR_ADDRESS: string; + LOCAL_ACCOUNTING_ORACLE_ADDRESS?: string; + LOCAL_ACL_ADDRESS?: string; + LOCAL_BURNER_ADDRESS?: string; + LOCAL_DEPOSIT_SECURITY_MODULE_ADDRESS?: string; + LOCAL_EL_REWARDS_VAULT_ADDRESS?: string; + LOCAL_HASH_CONSENSUS_ADDRESS?: string; + LOCAL_KERNEL_ADDRESS?: string; + LOCAL_LEGACY_ORACLE_ADDRESS?: string; + LOCAL_LIDO_ADDRESS?: string; + LOCAL_NOR_ADDRESS?: string; + LOCAL_ORACLE_DAEMON_CONFIG_ADDRESS?: string; + LOCAL_ORACLE_REPORT_SANITY_CHECKER_ADDRESS?: string; + LOCAL_SDVT_ADDRESS?: string; + LOCAL_STAKING_ROUTER_ADDRESS?: string; + LOCAL_VALIDATORS_EXIT_BUS_ORACLE_ADDRESS?: string; + LOCAL_WITHDRAWAL_QUEUE_ADDRESS?: string; + LOCAL_WITHDRAWAL_VAULT_ADDRESS?: string; + + /* for mainnet fork testing */ + MAINNET_RPC_URL: string; + MAINNET_LOCATOR_ADDRESS: string; + MAINNET_AGENT_ADDRESS: string; + MAINNET_VOTING_ADDRESS: string; + MAINNET_EASY_TRACK_EXECUTOR_ADDRESS: string; + MAINNET_ACCOUNTING_ORACLE_ADDRESS?: string; + MAINNET_ACL_ADDRESS?: string; + MAINNET_BURNER_ADDRESS?: string; + MAINNET_DEPOSIT_SECURITY_MODULE_ADDRESS?: string; + MAINNET_EL_REWARDS_VAULT_ADDRESS?: string; + MAINNET_HASH_CONSENSUS_ADDRESS?: string; + MAINNET_KERNEL_ADDRESS?: string; + MAINNET_LEGACY_ORACLE_ADDRESS?: string; + MAINNET_LIDO_ADDRESS?: string; + MAINNET_NOR_ADDRESS?: string; + MAINNET_ORACLE_DAEMON_CONFIG_ADDRESS?: string; + MAINNET_ORACLE_REPORT_SANITY_CHECKER_ADDRESS?: string; + MAINNET_SDVT_ADDRESS?: string; + MAINNET_STAKING_ROUTER_ADDRESS?: string; + MAINNET_VALIDATORS_EXIT_BUS_ORACLE_ADDRESS?: string; + MAINNET_WITHDRAWAL_QUEUE_ADDRESS?: string; + MAINNET_WITHDRAWAL_VAULT_ADDRESS?: string; + } +} diff --git a/hardhat.config.ts b/hardhat.config.ts index ce489969a..b37d10451 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -5,6 +5,7 @@ import "@nomicfoundation/hardhat-chai-matchers"; import "@nomicfoundation/hardhat-toolbox"; import
"@typechain/hardhat"; +import "dotenv/config"; import "solidity-coverage"; import "tsconfig-paths/register"; import "hardhat-tracer"; @@ -37,10 +38,14 @@ function loadAccounts(networkName: string) { const config: HardhatUserConfig = { defaultNetwork: "hardhat", networks: { - local: { - url: RPC_URL, + "local": { + url: process.env.LOCAL_RPC_URL || RPC_URL, + }, + "mainnet-fork": { + url: process.env.MAINNET_RPC_URL || RPC_URL, + timeout: 20 * 60 * 1000, // 20 minutes }, - hardhat: { + "hardhat": { // setting base fee to 0 to avoid extra calculations doesn't work :( // minimal base fee is 1 for EIP-1559 // gasPrice: 0, @@ -55,7 +60,7 @@ const config: HardhatUserConfig = { }, forking: HARDHAT_FORKING_URL ? { url: HARDHAT_FORKING_URL } : undefined, }, - sepolia: { + "sepolia": { url: RPC_URL, chainId: 11155111, accounts: loadAccounts("sepolia"), @@ -115,6 +120,9 @@ const config: HardhatUserConfig = { }, ], }, + tracer: { + tasks: ["watch"], + }, typechain: { outDir: "typechain-types", target: "ethers-v6", @@ -132,6 +140,7 @@ const config: HardhatUserConfig = { }, mocha: { rootHooks: mochaRootHooks, + timeout: 20 * 60 * 1000, // 20 minutes }, warnings: { "@aragon/**/*": { diff --git a/lib/account.ts b/lib/account.ts index 4331403e3..cb945b2ef 100644 --- a/lib/account.ts +++ b/lib/account.ts @@ -11,8 +11,14 @@ export async function impersonate(address: string, balance?: bigint): Promise { + const networkName = await getNetworkName(); + + await ethers.provider.send(`${networkName}_setBalance`, [address, "0x" + bigintToHex(balance)]); +} diff --git a/lib/bigint-math.ts b/lib/bigint-math.ts new file mode 100644 index 000000000..692e65fdf --- /dev/null +++ b/lib/bigint-math.ts @@ -0,0 +1,8 @@ +/** + * NB: ATM, there is no native support for BigInt math in TS/JS, so we're using this workaround. + */ +export const BigIntMath = { + abs: (x: bigint) => (x < 0n ? -x : x), + min: (x: bigint, y: bigint) => (x < y ? x : y), + max: (x: bigint, y: bigint) => (x > y ? 
x : y), +}; diff --git a/lib/constants.ts b/lib/constants.ts index 606c6122b..a0ab6bc31 100644 --- a/lib/constants.ts +++ b/lib/constants.ts @@ -12,6 +12,10 @@ export const ERC721METADATA_INTERFACE_ID = "0x5b5e139f"; // 0x49064906 is magic number ERC4906 interfaceId as defined in the standard https://eips.ethereum.org/EIPS/eip-4906 export const ERC4906_INTERFACE_ID = "0x49064906"; +// HashConsensus farFutureEpoch: +// (2n ** 64n - 1n - GENESIS_TIME) / SECONDS_PER_SLOT / SLOTS_PER_EPOCH +export const HASH_CONSENSUS_FAR_FUTURE_EPOCH = 48038396021015343n; + // OZ Interfaces export const OZ_ACCESS_CONTROL_INTERFACE_ID = "0x7965db0b"; export const OZ_ACCESS_CONTROL_ENUMERABLE_INTERFACE_ID = "0x5a05180f"; diff --git a/lib/deploy.ts b/lib/deploy.ts index 02d2727aa..ae947e996 100644 --- a/lib/deploy.ts +++ b/lib/deploy.ts @@ -27,7 +27,7 @@ export async function makeTx( args: ConvertibleToString[], txParams: TxParams, ): Promise { - log.lineWithArguments(`${yl(contract.name)}[${contract.address}].${yl(funcName)}`, args); + log.withArguments(`${yl(contract.name)}[${contract.address}].${yl(funcName)}`, args); const tx = await contract.getFunction(funcName)(...args, txParams); log(`tx sent: ${tx.hash} (nonce ${tx.nonce})...`); @@ -54,7 +54,7 @@ async function getDeployTxParams(deployer: string) { maxFeePerGas: ethers.parseUnits(String(GAS_MAX_FEE), "gwei"), }; } else { - throw new Error('Must specify gas ENV vars: "GAS_PRIORITY_FEE" and "GAS_MAX_FEE" in gwei (like just "3")'); + throw new Error("Must specify gas ENV vars: \"GAS_PRIORITY_FEE\" and \"GAS_MAX_FEE\" in gwei (like just \"3\")"); } } @@ -108,7 +108,7 @@ export async function deployWithoutProxy( constructorArgs: ConvertibleToString[] = [], addressFieldName = "address", ): Promise { - log.lineWithArguments(`Deploying ${artifactName} (without proxy) with constructor args: `, constructorArgs); + log.withArguments(`Deploying ${artifactName} (without proxy) with constructor args: `, constructorArgs); const contract = await deployContract(artifactName, constructorArgs, deployer); @@ -129,10 +129,7 @@ export async function deployImplementation( deployer: string, constructorArgs: ConvertibleToString[] = [], ): Promise { - log.lineWithArguments( - `Deploying implementation for proxy of ${artifactName} with constructor args: `, - constructorArgs, - ); + log.withArguments(`Deploying implementation for proxy of ${artifactName} with constructor args: `, constructorArgs); const contract = await deployContract(artifactName, constructorArgs, deployer); updateObjectInState(nameInState, { @@ -154,10 +151,7 @@ export async function deployBehindOssifiableProxy( implementation: null | string = null, ) { if (implementation === null) { - log.lineWithArguments( - `Deploying implementation for proxy of ${artifactName} with constructor args: `, - constructorArgs, - ); + log.withArguments(`Deploying implementation for proxy of ${artifactName} with constructor args: `, constructorArgs); const contract = await deployContract(artifactName, constructorArgs, deployer); implementation = contract.address; } else { @@ -165,7 +159,7 @@ export async function deployBehindOssifiableProxy( } const proxyConstructorArgs = [implementation, proxyOwner, "0x"]; - log.lineWithArguments( + log.withArguments( `Deploying ${PROXY_CONTRACT_NAME} for ${artifactName} with constructor args: `, proxyConstructorArgs, ); diff --git a/lib/event.ts b/lib/event.ts index 2003db64d..09406645a 100644 --- a/lib/event.ts +++ b/lib/event.ts @@ -3,16 +3,71 @@ import { EventLog, Interface, InterfaceAbi, + 
Log, LogDescription, TransactionReceipt, } from "ethers"; +import { log } from "./log"; + +const parseEventLog = (entry: EventLog): LogDescription | null => { + try { + return entry.interface.parseLog(entry); + } catch (error) { + log.error(`Error parsing EventLog: ${(error as Error).message}`); + return null; + } +}; + +const parseWithInterfaces = (entry: Log, interfaces: Interface[]): LogDescription | null => { + for (const iface of interfaces) { + try { + const logDescription = iface.parseLog(entry); + if (logDescription) { + return logDescription; + } + } catch (error) { + log.error(`Error parsing log with interface: ${(error as Error).message}`); + } + } + return null; +}; + +const parseLogEntry = (entry: Log, interfaces: Interface[]): LogDescription | null => { + if (entry instanceof EventLog) { + return parseEventLog(entry); + } else if (interfaces) { + return parseWithInterfaces(entry, interfaces); + } + return null; +}; + +export function findEventsWithInterfaces(receipt: ContractTransactionReceipt, eventName: string, interfaces: Interface[]): LogDescription[] { + const events: LogDescription[] = []; + const notParsedLogs: Log[] = []; + + receipt.logs.forEach(entry => { + const logDescription = parseLogEntry(entry, interfaces); + if (logDescription) { + events.push(logDescription); + } else { + notParsedLogs.push(entry); + } + }); + + if (notParsedLogs.length > 0) { + // log.warning("The following logs could not be parsed:", notParsedLogs); + } + + return events.filter(e => e.name === eventName); +} + export function findEvents(receipt: ContractTransactionReceipt, eventName: string) { const events = []; - for (const log of receipt.logs) { - if (log instanceof EventLog && log.fragment.name === eventName) { - events.push(log); + for (const entry of receipt.logs) { + if (entry instanceof EventLog && entry.fragment.name === eventName) { + events.push(entry); } } @@ -23,9 +78,9 @@ export function findEventsWithAbi(receipt: TransactionReceipt, eventName: string const iface = new Interface(abi); const foundEvents = []; - for (const log of receipt.logs) { + for (const entry of receipt.logs) { try { - const event = iface.parseLog(log); + const event = iface.parseLog(entry); if (event && event.name == eventName) { foundEvents.push(event); } diff --git a/lib/index.ts b/lib/index.ts index 193d5a4f4..6b43a9b90 100644 --- a/lib/index.ts +++ b/lib/index.ts @@ -1,5 +1,6 @@ export * from "./account"; export * from "./address"; +export * from "./bigint-math"; export * from "./constants"; export * from "./contract"; export * from "./deploy"; @@ -19,5 +20,6 @@ export * from "./signing-keys"; export * from "./state-file"; export * from "./string"; export * from "./time"; +export * from "./transaction"; export * from "./type"; export * from "./units"; diff --git a/lib/log.ts b/lib/log.ts index 7e2d4626c..808a8d4d3 100644 --- a/lib/log.ts +++ b/lib/log.ts @@ -1,6 +1,8 @@ import chalk from "chalk"; import path from "path"; +import { TraceableTransaction } from "./type"; + export type ConvertibleToString = string | number | boolean | { toString(): string }; export const rd = (s: ConvertibleToString) => chalk.red(s); @@ -12,40 +14,73 @@ export const mg = (s: ConvertibleToString) => chalk.magenta(s); export const log = (...args: ConvertibleToString[]) => console.log(...args); +const INDENT = " "; + +const MIN_LINE_LENGTH = 4; +const LINE_LENGTH = 20; +const LONG_LINE_LENGTH = 40; + export const OK = gr("[✓]"); export const NOT_OK = rd("[×]"); -const INDENT = " "; -log.noEOL = (...args: 
ConvertibleToString[]) => { - process.stdout.write(args.toString() + " "); -}; +const LOG_LEVEL = process.env.LOG_LEVEL || "info"; -// TODO: add logging to file -log.success = (...args: ConvertibleToString[]) => { - console.log(OK, ...args); -}; +const _line = (length = LINE_LENGTH, minLength = LINE_LENGTH): string => "=".repeat(Math.max(length, minLength)); -log.error = (...args: ConvertibleToString[]) => { - console.error(NOT_OK, ...args); -}; +const _splitter = (minLength = LINE_LENGTH, ...args: ConvertibleToString[]) => { + if (minLength < MIN_LINE_LENGTH) minLength = MIN_LINE_LENGTH; -log.emptyLine = () => { - console.log(); -}; + console.error(cy(_line(0, minLength))); -log.scriptStart = (filename: string) => { - log.emptyLine(); - logWideSplitter(); - log(`Started script ${bl(path.basename(filename))}`); - logWideSplitter(); + if (args.length) { + console.error(...args); + } }; -log.scriptFinish = (filename: string) => { - log(`Finished running script ${bl(path.basename(filename))}`); +const _header = (minLength = 20, ...args: ConvertibleToString[]) => { + if (minLength < MIN_LINE_LENGTH) minLength = MIN_LINE_LENGTH; + + const title = args[0]?.toString().trim() ?? ""; + const totalLength = Math.max(title.length + 4, minLength); + + const line = _line(totalLength + 4, minLength); + const paddedTitle = title.padStart((totalLength + title.length) / 2).padEnd(totalLength); + + console.error(`\n${cy(line)}`); + console.error(`${cy("=")} ${mg(paddedTitle)} ${cy("=")}`); + console.error(`${cy(line)}`); + + if (args.length > 1) { + console.error(...args.slice(1).map((s) => s.toString())); + } }; -log.lineWithArguments = (firstLine: string, args: ConvertibleToString[]) => { - log.noEOL(`${firstLine}(`); +const _title = (title: string) => log(mg(title)); + +const _record = (label: string, value: ConvertibleToString) => log(`${chalk.grey(label)}: ${yl(value.toString())}`); + +// TODO: add logging to file + +// TODO: fix log levels + +log.noEOL = (...args: ConvertibleToString[]) => process.stdout.write(args.toString() + " "); + +log.success = (...args: ConvertibleToString[]) => console.log(OK, ...args); + +log.error = (...args: ConvertibleToString[]) => console.error(NOT_OK, ...args); + +log.splitter = (...args: ConvertibleToString[]) => _splitter(LINE_LENGTH, ...args); + +log.wideSplitter = (...args: ConvertibleToString[]) => _splitter(LONG_LINE_LENGTH, ...args); + +log.table = (...args: ConvertibleToString[]) => console.table(...args); + +log.emptyLine = () => console.log(); + +log.header = (...args: ConvertibleToString[]) => _header(LINE_LENGTH, ...args); + +log.withArguments = (firstLine: string, args: ConvertibleToString[]) => { + log.noEOL(`${firstLine.trim()} (`); if (args.length > 0) { log.emptyLine(); } @@ -55,50 +90,53 @@ log.lineWithArguments = (firstLine: string, args: ConvertibleToString[]) => { log(`)... 
`); }; -const _line = (length = 0, minLength = 20) => "".padStart(Math.max(length, minLength), "="); - -const _header = (minLength = 20, args: ConvertibleToString[]) => { - if (minLength < 4) minLength = 4; - const msg = ""; - if (args.length > 0 && typeof args[0] === "string") { - args[0].toString().padEnd(minLength - 4, " "); - args.shift(); - } - const line = _line(msg.length + 4, minLength); - console.error(`\n${cy(line)}\n${cy("=")} ${mg(msg)} ${cy("=")}\n${cy(line)}\n`); - if (args.length) { - console.error(...args); - } +log.scriptStart = (filename: string) => { + log.emptyLine(); + log.wideSplitter(); + log(`Started script ${bl(path.basename(filename))}`); + log.wideSplitter(); }; -const _splitter = (minLength = 20, ...args: ConvertibleToString[]) => { - if (minLength < 4) minLength = 4; - console.error(cy(_line(0, minLength))); - if (args.length) { - console.error(...args); - } +log.scriptFinish = (filename: string) => { + log(`Finished running script ${bl(path.basename(filename))}`); }; -export function logSplitter(...args: ConvertibleToString[]) { - _splitter(20, ...args); -} - -log.splitter = logSplitter; - -export function logWideSplitter(...args: ConvertibleToString[]) { - _splitter(40, ...args); -} - -log.wideSplitter = logWideSplitter; +log.done = (message: string) => { + log.success(message); + log.emptyLine(); +}; -function logHeader(...args: ConvertibleToString[]) { - _header(40, args); -} +log.debug = (title: string, records: Record) => { + if (LOG_LEVEL != "debug" && LOG_LEVEL != "all") return; -log.header = logHeader; + _title(title); + Object.keys(records).forEach((label) => _record(` ${label}`, records[label])); + log.emptyLine(); +}; -function logTable(...args: ConvertibleToString[]) { - console.table(...args); -} +log.warning = (title: string, ...args: ConvertibleToString[]): void => { + log(chalk.bold.yellow(title)); + args.forEach((arg) => log(arg)); + log.emptyLine(); +}; -log.table = logTable; +log.traceTransaction = (name: string, tx: TraceableTransaction) => { + const value = tx.value === "0.0" ? "" : `Value: ${yl(tx.value)} ETH`; + const from = `From: ${yl(tx.from)}`; + const to = `To: ${yl(tx.to)}`; + const gasPrice = `Gas price: ${yl(tx.gasPrice)} gwei`; + const gasLimit = `Gas limit: ${yl(tx.gasLimit)}`; + const gasUsed = `Gas used: ${yl(tx.gasUsed)} (${yl(tx.gasUsedPercent)})`; + const block = `Block: ${yl(tx.blockNumber)}`; + const nonce = `Nonce: ${yl(tx.nonce)}`; + + const color = tx.status ? gr : rd; + const status = `${color(name)} ${color(tx.status ? "confirmed" : "failed")}`; + + log(`Transaction sent:`, yl(tx.hash)); + log(` ${from} ${to} ${value}`); + log(` ${gasPrice} ${gasLimit} ${gasUsed}`); + log(` ${block} ${nonce}`); + log(` ${status}`); + log.emptyLine(); +}; diff --git a/lib/protocol/context.ts b/lib/protocol/context.ts new file mode 100644 index 000000000..0fae96c37 --- /dev/null +++ b/lib/protocol/context.ts @@ -0,0 +1,39 @@ +import { ContractTransactionReceipt } from "ethers"; + +import { ether, findEventsWithInterfaces, impersonate, log } from "lib"; + +import { discover } from "./discover"; +import { provision } from "./provision"; +import { ProtocolContext, ProtocolContextFlags, ProtocolSigners, Signer } from "./types"; + +const getSigner = async (signer: Signer, balance = ether("100"), signers: ProtocolSigners) => { + const signerAddress = signers[signer] ?? 
signer; + return impersonate(signerAddress, balance); +}; + +export const getProtocolContext = async (): Promise => { + const { contracts, signers } = await discover(); + const interfaces = Object.values(contracts).map(contract => contract.interface); + + // By default, all flags are "on" + const flags = { + withSimpleDvtModule: process.env.INTEGRATION_SIMPLE_DVT_MODULE !== "off", + } as ProtocolContextFlags; + + log.debug("Protocol context flags", { + "With simple DVT module": flags.withSimpleDvtModule, + }); + + const context = { + contracts, + signers, + interfaces, + flags, + getSigner: async (signer: Signer, balance?: bigint) => getSigner(signer, balance, signers), + getEvents: (receipt: ContractTransactionReceipt, eventName: string) => findEventsWithInterfaces(receipt, eventName, interfaces), + } as ProtocolContext; + + await provision(context); + + return context; +}; diff --git a/lib/protocol/discover.ts b/lib/protocol/discover.ts new file mode 100644 index 000000000..ee99d8de6 --- /dev/null +++ b/lib/protocol/discover.ts @@ -0,0 +1,179 @@ +import hre from "hardhat"; + +import { AccountingOracle, Lido, LidoLocator, StakingRouter } from "typechain-types"; + +import { batch, log } from "lib"; + +import { getNetworkConfig, ProtocolNetworkConfig } from "./networks"; +import { + AragonContracts, + ContractName, + ContractType, + CoreContracts, + HashConsensusContracts, + LoadedContract, + ProtocolContracts, + ProtocolSigners, + StakingModuleContracts, +} from "./types"; + +const guard = (address: string, env: string) => { + if (!address) throw new Error(`${address} address is not set, please set it in the environment variables: ${env}`); +}; + +const getDiscoveryConfig = async () => { + const config = await getNetworkConfig(hre.network.name); + if (!config) { + throw new Error(`Network ${hre.network.name} is not supported`); + } + + const locatorAddress = config.get("locator"); + const agentAddress = config.get("agentAddress"); + const votingAddress = config.get("votingAddress"); + const easyTrackExecutorAddress = config.get("easyTrackAddress"); + + guard(locatorAddress, config.env.locator); + guard(agentAddress, config.env.agentAddress); + guard(votingAddress, config.env.votingAddress); + guard(easyTrackExecutorAddress, config.env.easyTrackAddress); + + log.debug("Discovery config", { + "Network": hre.network.name, + "Locator address": locatorAddress, + "Agent address": agentAddress, + "Voting address": votingAddress, + "Easy track executor address": easyTrackExecutorAddress, + }); + + return config; +}; + +/** + * Load contract by name and address. + */ +const loadContract = async (name: Name, address: string) => { + const contract = (await hre.ethers.getContractAt(name, address)) as unknown as LoadedContract>; + contract.address = address; + return contract; +}; + +/** + * Load all Lido protocol foundation contracts. 
+ */ +const getFoundationContracts = async (locator: LoadedContract, config: ProtocolNetworkConfig) => + (await batch({ + accountingOracle: loadContract( + "AccountingOracle", + config.get("accountingOracle") || await locator.accountingOracle(), + ), + depositSecurityModule: loadContract( + "DepositSecurityModule", + config.get("depositSecurityModule") || await locator.depositSecurityModule(), + ), + elRewardsVault: loadContract( + "LidoExecutionLayerRewardsVault", + config.get("elRewardsVault") || await locator.elRewardsVault(), + ), + legacyOracle: loadContract("LegacyOracle", config.get("legacyOracle") || await locator.legacyOracle()), + lido: loadContract("Lido", config.get("lido") || await locator.lido()), + oracleReportSanityChecker: loadContract( + "OracleReportSanityChecker", + config.get("oracleReportSanityChecker") || await locator.oracleReportSanityChecker(), + ), + burner: loadContract("Burner", config.get("burner") || await locator.burner()), + stakingRouter: loadContract("StakingRouter", config.get("stakingRouter") || await locator.stakingRouter()), + validatorsExitBusOracle: loadContract( + "ValidatorsExitBusOracle", + config.get("validatorsExitBusOracle") || await locator.validatorsExitBusOracle(), + ), + withdrawalQueue: loadContract( + "WithdrawalQueueERC721", + config.get("withdrawalQueue") || await locator.withdrawalQueue(), + ), + withdrawalVault: loadContract( + "WithdrawalVault", + config.get("withdrawalVault") || await locator.withdrawalVault(), + ), + oracleDaemonConfig: loadContract( + "OracleDaemonConfig", + config.get("oracleDaemonConfig") || await locator.oracleDaemonConfig(), + ), + })) as CoreContracts; + +/** + * Load Aragon contracts required for protocol. + */ +const getAragonContracts = async (lido: LoadedContract, config: ProtocolNetworkConfig) => { + const kernelAddress = config.get("kernel") || await lido.kernel(); + const kernel = await loadContract("Kernel", kernelAddress); + return (await batch({ + kernel: new Promise((resolve) => resolve(kernel)), // Avoiding double loading + acl: loadContract("ACL", config.get("acl") || await kernel.acl()), + })) as AragonContracts; +}; + +/** + * Load staking modules contracts registered in the staking router. + */ +const getStakingModules = async (stakingRouter: LoadedContract, config: ProtocolNetworkConfig) => { + const [nor, sdvt] = await stakingRouter.getStakingModules(); + return (await batch({ + nor: loadContract("NodeOperatorsRegistry", config.get("nor") || nor.stakingModuleAddress), + sdvt: loadContract("NodeOperatorsRegistry", config.get("sdvt") || sdvt.stakingModuleAddress), + })) as StakingModuleContracts; +}; + +/** + * Load HashConsensus contract for accounting oracle. 
+ */ +const getHashConsensus = async (accountingOracle: LoadedContract, config: ProtocolNetworkConfig) => { + const hashConsensusAddress = config.get("hashConsensus") || await accountingOracle.getConsensusContract(); + return (await batch({ + hashConsensus: loadContract("HashConsensus", hashConsensusAddress), + })) as HashConsensusContracts; +}; + +export async function discover() { + const networkConfig = await getDiscoveryConfig(); + const locator = await loadContract("LidoLocator", networkConfig.get("locator")); + const foundationContracts = await getFoundationContracts(locator, networkConfig); + + const contracts = { + locator, + ...foundationContracts, + ...(await getAragonContracts(foundationContracts.lido, networkConfig)), + ...(await getStakingModules(foundationContracts.stakingRouter, networkConfig)), + ...(await getHashConsensus(foundationContracts.accountingOracle, networkConfig)), + } as ProtocolContracts; + + log.debug("Contracts discovered", { + "Locator": locator.address, + "Lido": foundationContracts.lido.address, + "Accounting Oracle": foundationContracts.accountingOracle.address, + "Hash Consensus": contracts.hashConsensus.address, + "Execution Layer Rewards Vault": foundationContracts.elRewardsVault.address, + "Withdrawal Queue": foundationContracts.withdrawalQueue.address, + "Withdrawal Vault": foundationContracts.withdrawalVault.address, + "Validators Exit Bus Oracle": foundationContracts.validatorsExitBusOracle.address, + "Oracle Daemon Config": foundationContracts.oracleDaemonConfig.address, + "Oracle Report Sanity Checker": foundationContracts.oracleReportSanityChecker.address, + "Staking Router": foundationContracts.stakingRouter.address, + "Deposit Security Module": foundationContracts.depositSecurityModule.address, + "NOR": contracts.nor.address, + "sDVT": contracts.sdvt.address, + "Kernel": contracts.kernel.address, + "ACL": contracts.acl.address, + "Burner": foundationContracts.burner.address, + "Legacy Oracle": foundationContracts.legacyOracle.address, + }); + + const signers = { + agent: networkConfig.get("agentAddress"), + voting: networkConfig.get("votingAddress"), + easyTrack: networkConfig.get("easyTrackAddress"), + } as ProtocolSigners; + + log.debug("Signers discovered", signers); + + return { contracts, signers }; +} diff --git a/lib/protocol/helpers/accounting.ts b/lib/protocol/helpers/accounting.ts new file mode 100644 index 000000000..a531ebaa4 --- /dev/null +++ b/lib/protocol/helpers/accounting.ts @@ -0,0 +1,786 @@ +import { expect } from "chai"; +import { Result } from "ethers"; +import { ethers } from "hardhat"; + +import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; + +import { AccountingOracle } from "typechain-types"; + +import { + advanceChainTime, + BigIntMath, + certainAddress, + ether, + EXTRA_DATA_FORMAT_EMPTY, + getCurrentBlockTimestamp, + HASH_CONSENSUS_FAR_FUTURE_EPOCH, + impersonate, + log, + ONE_GWEI, + trace, +} from "lib"; + +import { ProtocolContext } from "../types"; + +export type OracleReportOptions = { + clDiff: bigint; + clAppearedValidators: bigint; + elRewardsVaultBalance: bigint | null; + withdrawalVaultBalance: bigint | null; + sharesRequestedToBurn: bigint | null; + withdrawalFinalizationBatches: bigint[]; + simulatedShareRate: bigint | null; + refSlot: bigint | null; + dryRun: boolean; + excludeVaultsBalances: boolean; + skipWithdrawals: boolean; + waitNextReportTime: boolean; + extraDataFormat: bigint; + extraDataHash: string; + extraDataItemsCount: bigint; + extraDataList: Uint8Array; + 
stakingModuleIdsWithNewlyExitedValidators: bigint[]; + numExitedValidatorsByStakingModule: bigint[]; + reportElVault: boolean; + reportWithdrawalsVault: boolean; + silent: boolean; +}; + +export type OracleReportPushOptions = { + refSlot: bigint; + clBalance: bigint; + numValidators: bigint; + withdrawalVaultBalance: bigint; + elRewardsVaultBalance: bigint; + sharesRequestedToBurn: bigint; + simulatedShareRate: bigint; + stakingModuleIdsWithNewlyExitedValidators?: bigint[]; + numExitedValidatorsByStakingModule?: bigint[]; + withdrawalFinalizationBatches?: bigint[]; + isBunkerMode?: boolean; + extraDataFormat?: bigint; + extraDataHash?: string; + extraDataItemsCount?: bigint; + extraDataList?: Uint8Array; +}; + +const ZERO_HASH = new Uint8Array(32).fill(0); +const ZERO_BYTES32 = "0x" + Buffer.from(ZERO_HASH).toString("hex"); +const SHARE_RATE_PRECISION = 10n ** 27n; +const MIN_MEMBERS_COUNT = 3n; + +/** + * Prepare and push oracle report. + */ +export const report = async ( + ctx: ProtocolContext, + { + clDiff = ether("10"), + clAppearedValidators = 0n, + elRewardsVaultBalance = null, + withdrawalVaultBalance = null, + sharesRequestedToBurn = null, + withdrawalFinalizationBatches = [], + simulatedShareRate = null, + refSlot = null, + dryRun = false, + excludeVaultsBalances = false, + skipWithdrawals = false, + waitNextReportTime = true, + extraDataFormat = EXTRA_DATA_FORMAT_EMPTY, + extraDataHash = ZERO_BYTES32, + extraDataItemsCount = 0n, + extraDataList = new Uint8Array(), + stakingModuleIdsWithNewlyExitedValidators = [], + numExitedValidatorsByStakingModule = [], + reportElVault = true, + reportWithdrawalsVault = true, + } = {} as Partial, +) => { + const { hashConsensus, lido, elRewardsVault, withdrawalVault, burner, accountingOracle } = ctx.contracts; + + // Fast-forward to next report time + if (waitNextReportTime) { + await waitNextAvailableReportTime(ctx); + } + + // Get report slot from the protocol + if (!refSlot) { + ({ refSlot } = await hashConsensus.getCurrentFrame()); + } + + const { beaconValidators, beaconBalance } = await lido.getBeaconStat(); + const postCLBalance = beaconBalance + clDiff; + const postBeaconValidators = beaconValidators + clAppearedValidators; + + log.debug("Beacon", { + "Beacon validators": postBeaconValidators, + "Beacon balance": ethers.formatEther(postCLBalance), + }); + + elRewardsVaultBalance = elRewardsVaultBalance ?? (await ethers.provider.getBalance(elRewardsVault.address)); + withdrawalVaultBalance = withdrawalVaultBalance ?? (await ethers.provider.getBalance(withdrawalVault.address)); + + log.debug("Balances", { + "Withdrawal vault": ethers.formatEther(withdrawalVaultBalance), + "ElRewards vault": ethers.formatEther(elRewardsVaultBalance), + }); + + // excludeVaultsBalance safely forces LIDO to see vault balances as empty allowing zero/negative rebase + // simulateReports needs proper withdrawal and elRewards vaults balances + + if (excludeVaultsBalances) { + if (!reportWithdrawalsVault || !reportElVault) { + log.warning("excludeVaultsBalances overrides reportWithdrawalsVault and reportElVault"); + } + reportWithdrawalsVault = false; + reportElVault = false; + } + + withdrawalVaultBalance = reportWithdrawalsVault ? withdrawalVaultBalance : 0n; + elRewardsVaultBalance = reportElVault ? 
elRewardsVaultBalance : 0n; + + if (sharesRequestedToBurn === null) { + const [coverShares, nonCoverShares] = await burner.getSharesRequestedToBurn(); + sharesRequestedToBurn = coverShares + nonCoverShares; + } + + log.debug("Burner", { + "Shares Requested To Burn": sharesRequestedToBurn, + "Withdrawal vault": ethers.formatEther(withdrawalVaultBalance), + "ElRewards vault": ethers.formatEther(elRewardsVaultBalance), + }); + + let isBunkerMode = false; + + if (!skipWithdrawals) { + const params = { + refSlot, + beaconValidators: postBeaconValidators, + clBalance: postCLBalance, + withdrawalVaultBalance, + elRewardsVaultBalance, + }; + + const simulatedReport = await simulateReport(ctx, params); + + expect(simulatedReport).to.not.be.undefined; + + const { postTotalPooledEther, postTotalShares, withdrawals, elRewards } = simulatedReport!; + + log.debug("Simulated report", { + "Post Total Pooled Ether": ethers.formatEther(postTotalPooledEther), + "Post Total Shares": postTotalShares, + "Withdrawals": ethers.formatEther(withdrawals), + "El Rewards": ethers.formatEther(elRewards), + }); + + if (simulatedShareRate === null) { + simulatedShareRate = (postTotalPooledEther * SHARE_RATE_PRECISION) / postTotalShares; + } + + if (withdrawalFinalizationBatches.length === 0) { + withdrawalFinalizationBatches = await getFinalizationBatches(ctx, { + shareRate: simulatedShareRate, + limitedWithdrawalVaultBalance: withdrawals, + limitedElRewardsVaultBalance: elRewards, + }); + } + + isBunkerMode = (await lido.getTotalPooledEther()) > postTotalPooledEther; + + log.debug("Bunker Mode", { "Is Active": isBunkerMode }); + } else if (simulatedShareRate === null) { + simulatedShareRate = 0n; + } + + if (dryRun) { + const reportData = { + consensusVersion: await accountingOracle.getConsensusVersion(), + refSlot, + numValidators: postBeaconValidators, + clBalanceGwei: postCLBalance / ONE_GWEI, + stakingModuleIdsWithNewlyExitedValidators, + numExitedValidatorsByStakingModule, + withdrawalVaultBalance, + elRewardsVaultBalance, + sharesRequestedToBurn, + withdrawalFinalizationBatches, + simulatedShareRate, + isBunkerMode, + extraDataFormat, + extraDataHash, + extraDataItemsCount, + } as AccountingOracle.ReportDataStruct; + + log.debug("Final Report (Dry Run)", { + "Consensus version": reportData.consensusVersion, + "Ref slot": reportData.refSlot, + "CL balance": reportData.clBalanceGwei, + "Num validators": reportData.numValidators, + "Withdrawal vault balance": reportData.withdrawalVaultBalance, + "EL rewards vault balance": reportData.elRewardsVaultBalance, + "Shares requested to burn": reportData.sharesRequestedToBurn, + "Withdrawal finalization batches": reportData.withdrawalFinalizationBatches, + "Simulated share rate": reportData.simulatedShareRate, + "Is bunker mode": reportData.isBunkerMode, + "Extra data format": reportData.extraDataFormat, + "Extra data hash": reportData.extraDataHash, + "Extra data items count": reportData.extraDataItemsCount, + }); + + return { report: reportData, reportTx: undefined, extraDataTx: undefined }; + } + + const reportParams = { + refSlot, + clBalance: postCLBalance, + numValidators: postBeaconValidators, + withdrawalVaultBalance, + elRewardsVaultBalance, + sharesRequestedToBurn, + simulatedShareRate, + stakingModuleIdsWithNewlyExitedValidators, + numExitedValidatorsByStakingModule, + withdrawalFinalizationBatches, + isBunkerMode, + extraDataFormat, + extraDataHash, + extraDataItemsCount, + extraDataList, + }; + + return submitReport(ctx, reportParams); +}; + +/** + * Wait for 
the next available report time. + */ +export const waitNextAvailableReportTime = async (ctx: ProtocolContext): Promise<void> => { + const { hashConsensus } = ctx.contracts; + const { slotsPerEpoch, secondsPerSlot, genesisTime } = await hashConsensus.getChainConfig(); + const { refSlot } = await hashConsensus.getCurrentFrame(); + + const time = await getCurrentBlockTimestamp(); + + const { epochsPerFrame } = await hashConsensus.getFrameConfig(); + + log.debug("Current frame", { + "Ref slot": refSlot, + "Ref slot date": new Date(Number(genesisTime + refSlot * secondsPerSlot) * 1000).toUTCString(), + "Epochs per frame": epochsPerFrame, + "Slots per epoch": slotsPerEpoch, + "Seconds per slot": secondsPerSlot, + "Genesis time": genesisTime, + "Current time": time, + }); + + const slotsPerFrame = slotsPerEpoch * epochsPerFrame; + const nextRefSlot = refSlot + slotsPerFrame; + const nextFrameStart = genesisTime + nextRefSlot * secondsPerSlot; + + // add 10 slots to make sure the next frame has actually started + const nextFrameStartWithOffset = nextFrameStart + secondsPerSlot * 10n; + + const timeToAdvance = Number(nextFrameStartWithOffset - time); + + await advanceChainTime(timeToAdvance); + + const timeAfterAdvance = await getCurrentBlockTimestamp(); + + const nextFrame = await hashConsensus.getCurrentFrame(); + + log.debug("Next frame", { + "Next ref slot": nextRefSlot, + "Next frame date": new Date(Number(nextFrameStart) * 1000).toUTCString(), + "Time to advance": timeToAdvance, + "Time after advance": timeAfterAdvance, + "Time after advance date": new Date(Number(timeAfterAdvance) * 1000).toUTCString(), + "Ref slot": nextFrame.refSlot, + }); + + expect(nextFrame.refSlot).to.equal(refSlot + slotsPerFrame, "Next frame refSlot is incorrect"); +}; + +/** + * Simulate oracle report to get the expected result.
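+ * Impersonates the AccountingOracle address and calls lido.handleOracleReport via staticCall, so no state is persisted on-chain.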
+ */ +const simulateReport = async ( + ctx: ProtocolContext, + params: { + refSlot: bigint; + beaconValidators: bigint; + clBalance: bigint; + withdrawalVaultBalance: bigint; + elRewardsVaultBalance: bigint; + }, +): Promise< + { postTotalPooledEther: bigint; postTotalShares: bigint; withdrawals: bigint; elRewards: bigint } | undefined +> => { + const { hashConsensus, accountingOracle, lido } = ctx.contracts; + const { refSlot, beaconValidators, clBalance, withdrawalVaultBalance, elRewardsVaultBalance } = params; + + const { genesisTime, secondsPerSlot } = await hashConsensus.getChainConfig(); + const reportTimestamp = genesisTime + refSlot * secondsPerSlot; + + const accountingOracleAccount = await impersonate(accountingOracle.address, ether("100")); + + log.debug("Simulating oracle report", { + "Ref Slot": refSlot, + "Beacon Validators": beaconValidators, + "CL Balance": ethers.formatEther(clBalance), + "Withdrawal Vault Balance": ethers.formatEther(withdrawalVaultBalance), + "El Rewards Vault Balance": ethers.formatEther(elRewardsVaultBalance), + }); + + const [postTotalPooledEther, postTotalShares, withdrawals, elRewards] = await lido + .connect(accountingOracleAccount) + .handleOracleReport.staticCall( + reportTimestamp, + 1n * 24n * 60n * 60n, // 1 day + beaconValidators, + clBalance, + withdrawalVaultBalance, + elRewardsVaultBalance, + 0n, + [], + 0n, + ); + + log.debug("Simulation result", { + "Post Total Pooled Ether": ethers.formatEther(postTotalPooledEther), + "Post Total Shares": postTotalShares, + "Withdrawals": ethers.formatEther(withdrawals), + "El Rewards": ethers.formatEther(elRewards), + }); + + return { postTotalPooledEther, postTotalShares, withdrawals, elRewards }; +}; + +export const handleOracleReport = async ( + ctx: ProtocolContext, + params: { + beaconValidators: bigint; + clBalance: bigint; + sharesRequestedToBurn: bigint; + withdrawalVaultBalance: bigint; + elRewardsVaultBalance: bigint; + }, +): Promise => { + const { hashConsensus, accountingOracle, lido } = ctx.contracts; + const { beaconValidators, clBalance, sharesRequestedToBurn, withdrawalVaultBalance, elRewardsVaultBalance } = params; + + const { refSlot } = await hashConsensus.getCurrentFrame(); + const { genesisTime, secondsPerSlot } = await hashConsensus.getChainConfig(); + const reportTimestamp = genesisTime + refSlot * secondsPerSlot; + + const accountingOracleAccount = await impersonate(accountingOracle.address, ether("100")); + + try { + log.debug("Handle oracle report", { + "Ref Slot": refSlot, + "Beacon Validators": beaconValidators, + "CL Balance": ethers.formatEther(clBalance), + "Withdrawal Vault Balance": ethers.formatEther(withdrawalVaultBalance), + "El Rewards Vault Balance": ethers.formatEther(elRewardsVaultBalance), + }); + + const handleReportTx = await lido + .connect(accountingOracleAccount) + .handleOracleReport( + reportTimestamp, + 1n * 24n * 60n * 60n, // 1 day + beaconValidators, + clBalance, + withdrawalVaultBalance, + elRewardsVaultBalance, + sharesRequestedToBurn, + [], + 0n, + ); + + await trace("lido.handleOracleReport", handleReportTx); + } catch (error) { + log.error("Error", (error as Error).message ?? "Unknown error during oracle report simulation"); + expect(error).to.be.undefined; + } +}; + +/** + * Get finalization batches to finalize withdrawals. 
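+ * Iterates withdrawalQueue.calculateFinalizationBatches with the limited vault balances until the calculation is finished, then returns the non-zero batches.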
+ */ +const getFinalizationBatches = async ( + ctx: ProtocolContext, + params: { + shareRate: bigint; + limitedWithdrawalVaultBalance: bigint; + limitedElRewardsVaultBalance: bigint; + }, +): Promise => { + const { oracleReportSanityChecker, lido, withdrawalQueue } = ctx.contracts; + const { shareRate, limitedWithdrawalVaultBalance, limitedElRewardsVaultBalance } = params; + + const { requestTimestampMargin } = await oracleReportSanityChecker.getOracleReportLimits(); + + const bufferedEther = await lido.getBufferedEther(); + const unfinalizedSteth = await withdrawalQueue.unfinalizedStETH(); + + const reservedBuffer = BigIntMath.min(bufferedEther, unfinalizedSteth); + const availableEth = limitedWithdrawalVaultBalance + limitedElRewardsVaultBalance + reservedBuffer; + + const blockTimestamp = await getCurrentBlockTimestamp(); + const maxTimestamp = blockTimestamp - requestTimestampMargin; + const MAX_REQUESTS_PER_CALL = 1000n; + + if (availableEth === 0n) { + log.warning("No available ether to request withdrawals"); + return []; + } + + log.debug("Calculating finalization batches", { + "Share Rate": shareRate, + "Available Eth": ethers.formatEther(availableEth), + "Max Timestamp": maxTimestamp, + }); + + const baseState = { + remainingEthBudget: availableEth, + finished: false, + batches: Array(36).fill(0n), + batchesLength: 0n, + }; + + let batchesState = await withdrawalQueue.calculateFinalizationBatches( + shareRate, + maxTimestamp, + MAX_REQUESTS_PER_CALL, + baseState, + ); + + log.debug("Calculated finalization batches", { + "Batches": batchesState.batches.join(", "), + "Finished": batchesState.finished, + "Batches Length": batchesState.batchesLength, + }); + + while (!batchesState.finished) { + const state = { + remainingEthBudget: batchesState.remainingEthBudget, + finished: batchesState.finished, + batches: (batchesState.batches as Result).toArray(), + batchesLength: batchesState.batchesLength, + }; + + batchesState = await withdrawalQueue.calculateFinalizationBatches( + shareRate, + maxTimestamp, + MAX_REQUESTS_PER_CALL, + state, + ); + + log.debug("Calculated finalization batches", { + "Batches": batchesState.batches.join(", "), + "Finished": batchesState.finished, + "Batches Length": batchesState.batchesLength, + }); + } + + return (batchesState.batches as Result).toArray().filter((x) => x > 0n); +}; + +/** + * Main function to push oracle report to the protocol. 
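+ * Reaches consensus among the fast lane members first, then submits the main report data and the extra data (empty or list).
+ *
+ * Minimal usage sketch (illustrative values; see OracleReportPushOptions above for the full option set):
+ *
+ *   const { reportTx } = await submitReport(ctx, {
+ *     refSlot, clBalance, numValidators, withdrawalVaultBalance,
+ *     elRewardsVaultBalance, sharesRequestedToBurn, simulatedShareRate,
+ *   });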
+ */ +export const submitReport = async ( + ctx: ProtocolContext, + { + refSlot, + clBalance, + numValidators, + withdrawalVaultBalance, + elRewardsVaultBalance, + sharesRequestedToBurn, + simulatedShareRate, + stakingModuleIdsWithNewlyExitedValidators = [], + numExitedValidatorsByStakingModule = [], + withdrawalFinalizationBatches = [], + isBunkerMode = false, + extraDataFormat = 0n, + extraDataHash = ZERO_BYTES32, + extraDataItemsCount = 0n, + extraDataList = new Uint8Array(), + } = {} as OracleReportPushOptions, +) => { + const { accountingOracle } = ctx.contracts; + + log.debug("Pushing oracle report", { + "Ref slot": refSlot, + "CL balance": ethers.formatEther(clBalance), + "Validators": numValidators, + "Withdrawal vault": ethers.formatEther(withdrawalVaultBalance), + "El rewards vault": ethers.formatEther(elRewardsVaultBalance), + "Shares requested to burn": sharesRequestedToBurn, + "Simulated share rate": simulatedShareRate, + "Staking module ids with newly exited validators": stakingModuleIdsWithNewlyExitedValidators, + "Num exited validators by staking module": numExitedValidatorsByStakingModule, + "Withdrawal finalization batches": withdrawalFinalizationBatches, + "Is bunker mode": isBunkerMode, + "Extra data format": extraDataFormat, + "Extra data hash": extraDataHash, + "Extra data items count": extraDataItemsCount, + "Extra data list": extraDataList, + }); + + const consensusVersion = await accountingOracle.getConsensusVersion(); + const oracleVersion = await accountingOracle.getContractVersion(); + + const data = { + consensusVersion, + refSlot, + clBalanceGwei: clBalance / ONE_GWEI, + numValidators, + withdrawalVaultBalance, + elRewardsVaultBalance, + sharesRequestedToBurn, + simulatedShareRate, + stakingModuleIdsWithNewlyExitedValidators, + numExitedValidatorsByStakingModule, + withdrawalFinalizationBatches, + isBunkerMode, + extraDataFormat, + extraDataHash, + extraDataItemsCount, + } as AccountingOracle.ReportDataStruct; + + const items = getReportDataItems(data); + const hash = calcReportDataHash(items); + + const submitter = await reachConsensus(ctx, { + refSlot, + reportHash: hash, + consensusVersion, + }); + + log.debug("Pushing oracle report", data); + + const reportTx = await accountingOracle.connect(submitter).submitReportData(data, oracleVersion); + await trace("accountingOracle.submitReportData", reportTx); + + log.debug("Pushing oracle report", { + "Ref slot": refSlot, + "Consensus version": consensusVersion, + "Report hash": hash, + }); + + let extraDataTx; + if (extraDataFormat) { + extraDataTx = await accountingOracle.connect(submitter).submitReportExtraDataList(extraDataList); + await trace("accountingOracle.submitReportExtraDataList", extraDataTx); + } else { + extraDataTx = await accountingOracle.connect(submitter).submitReportExtraDataEmpty(); + await trace("accountingOracle.submitReportExtraDataEmpty", extraDataTx); + } + + const state = await accountingOracle.getProcessingState(); + + log.debug("Processing state", { + "State ref slot": state.currentFrameRefSlot, + "State main data hash": state.mainDataHash, + "State main data submitted": state.mainDataSubmitted, + "State extra data hash": state.extraDataHash, + "State extra data format": state.extraDataFormat, + "State extra data submitted": state.extraDataSubmitted, + "State extra data items count": state.extraDataItemsCount, + "State extra data items submitted": state.extraDataItemsSubmitted, + }); + + expect(state.currentFrameRefSlot).to.equal(refSlot, "Processing state ref slot is incorrect"); + 
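// The assertions below pin the AccountingOracle processing state to the report that was just submitted. +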
expect(state.mainDataHash).to.equal(hash, "Processing state main data hash is incorrect"); + expect(state.mainDataSubmitted).to.be.true; + expect(state.extraDataHash).to.equal(extraDataHash, "Processing state extra data hash is incorrect"); + expect(state.extraDataFormat).to.equal(extraDataFormat, "Processing state extra data format is incorrect"); + expect(state.extraDataSubmitted).to.be.true; + expect(state.extraDataItemsCount).to.equal( + extraDataItemsCount, + "Processing state extra data items count is incorrect", + ); + expect(state.extraDataItemsSubmitted).to.equal( + extraDataItemsCount, + "Processing state extra data items submitted is incorrect", + ); + + log.debug("Oracle report pushed", { + "Ref slot": refSlot, + "Consensus version": consensusVersion, + "Report hash": hash, + }); + + return { report, reportTx, extraDataTx }; +}; + +/** + * Ensure that the oracle committee has the required number of members. + */ +export const ensureOracleCommitteeMembers = async ( + ctx: ProtocolContext, + minMembersCount = MIN_MEMBERS_COUNT, +) => { + const { hashConsensus } = ctx.contracts; + + const members = await hashConsensus.getFastLaneMembers(); + const addresses = members.addresses.map((address) => address.toLowerCase()); + + const agentSigner = await ctx.getSigner("agent"); + + if (addresses.length >= minMembersCount) { + log.debug("Oracle committee members count is sufficient", { + "Min members count": minMembersCount, + "Members count": addresses.length, + "Members": addresses.join(", "), + }); + + return; + } + + const managementRole = await hashConsensus.MANAGE_MEMBERS_AND_QUORUM_ROLE(); + await hashConsensus.connect(agentSigner).grantRole(managementRole, agentSigner); + + let count = addresses.length; + while (addresses.length < minMembersCount) { + log.warning(`Adding oracle committee member ${count}`); + + const address = getOracleCommitteeMemberAddress(count); + const addTx = await hashConsensus.connect(agentSigner).addMember(address, minMembersCount); + await trace("hashConsensus.addMember", addTx); + + addresses.push(address); + + log.success(`Added oracle committee member ${count}`); + + count++; + } + + await hashConsensus.connect(agentSigner).renounceRole(managementRole, agentSigner); + + log.debug("Checked oracle committee members count", { + "Min members count": minMembersCount, + "Members count": addresses.length, + "Members": addresses.join(", "), + }); + + expect(addresses.length).to.be.gte(minMembersCount); +}; + +export const ensureHashConsensusInitialEpoch = async (ctx: ProtocolContext) => { + const { hashConsensus } = ctx.contracts; + + const { initialEpoch } = await hashConsensus.getFrameConfig(); + if (initialEpoch === HASH_CONSENSUS_FAR_FUTURE_EPOCH) { + log.warning("Initializing hash consensus epoch..."); + + const latestBlockTimestamp = await getCurrentBlockTimestamp(); + const { genesisTime, secondsPerSlot, slotsPerEpoch } = await hashConsensus.getChainConfig(); + const updatedInitialEpoch = (latestBlockTimestamp - genesisTime) / (slotsPerEpoch * secondsPerSlot); + + const agentSigner = await ctx.getSigner("agent"); + + const tx = await hashConsensus.connect(agentSigner).updateInitialEpoch(updatedInitialEpoch); + await trace("hashConsensus.updateInitialEpoch", tx); + + log.success("Hash consensus epoch initialized"); + } +}; + +/** + * Submit reports from all fast lane members to reach consensus on the report. 
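+ * Impersonates every fast lane member, submits the report hash from each, and returns the first member as the designated submitter.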
+ */ +const reachConsensus = async ( + ctx: ProtocolContext, + params: { + refSlot: bigint; + reportHash: string; + consensusVersion: bigint; + }, +) => { + const { hashConsensus } = ctx.contracts; + const { refSlot, reportHash, consensusVersion } = params; + + const { addresses } = await hashConsensus.getFastLaneMembers(); + + let submitter: HardhatEthersSigner | null = null; + + log.debug("Reaching consensus", { + "Ref slot": refSlot, + "Report hash": reportHash, + "Consensus version": consensusVersion, + "Addresses": addresses.join(", "), + }); + + for (const address of addresses) { + const member = await impersonate(address, ether("1")); + if (!submitter) { + submitter = member; + } + + const tx = await hashConsensus.connect(member).submitReport(refSlot, reportHash, consensusVersion); + await trace("hashConsensus.submitReport", tx); + } + + const { consensusReport } = await hashConsensus.getConsensusState(); + + expect(consensusReport).to.equal(reportHash, "Consensus report hash is incorrect"); + + return submitter as HardhatEthersSigner; +}; + +/** + * Helper function to get report data items in the required order. + */ +const getReportDataItems = (data: AccountingOracle.ReportDataStruct) => [ + data.consensusVersion, + data.refSlot, + data.numValidators, + data.clBalanceGwei, + data.stakingModuleIdsWithNewlyExitedValidators, + data.numExitedValidatorsByStakingModule, + data.withdrawalVaultBalance, + data.elRewardsVaultBalance, + data.sharesRequestedToBurn, + data.withdrawalFinalizationBatches, + data.simulatedShareRate, + data.isBunkerMode, + data.extraDataFormat, + data.extraDataHash, + data.extraDataItemsCount, +]; + +/** + * Helper function to calculate hash of the report data. + */ +const calcReportDataHash = (items: ReturnType) => { + const types = [ + "uint256", // consensusVersion + "uint256", // refSlot + "uint256", // numValidators + "uint256", // clBalanceGwei + "uint256[]", // stakingModuleIdsWithNewlyExitedValidators + "uint256[]", // numExitedValidatorsByStakingModule + "uint256", // withdrawalVaultBalance + "uint256", // elRewardsVaultBalance + "uint256", // sharesRequestedToBurn + "uint256[]", // withdrawalFinalizationBatches + "uint256", // simulatedShareRate + "bool", // isBunkerMode + "uint256", // extraDataFormat + "bytes32", // extraDataHash + "uint256", // extraDataItemsCount + ]; + + const data = ethers.AbiCoder.defaultAbiCoder().encode([`(${types.join(",")})`], [items]); + return ethers.keccak256(data); +}; + +/** + * Helper function to get oracle committee member address by id. 
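+ * Derives a deterministic address from the 'AO:HC:OC:<id>' seed via certainAddress.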
+ */ +const getOracleCommitteeMemberAddress = (id: number) => certainAddress(`AO:HC:OC:${id}`); diff --git a/lib/protocol/helpers/index.ts b/lib/protocol/helpers/index.ts new file mode 100644 index 000000000..7bac22b1f --- /dev/null +++ b/lib/protocol/helpers/index.ts @@ -0,0 +1,29 @@ +export { + unpauseStaking, + ensureStakeLimit, +} from "./staking"; + + +export { + unpauseWithdrawalQueue, + finalizeWithdrawalQueue, +} from "./withdrawal"; + +export { + OracleReportOptions, + OracleReportPushOptions, + ensureHashConsensusInitialEpoch, + ensureOracleCommitteeMembers, + waitNextAvailableReportTime, + handleOracleReport, + submitReport, + report, +} from "./accounting"; + +export { + sdvtEnsureOperators, +} from "./sdvt.helper"; + +export { + norEnsureOperators, +} from "./nor.helper"; diff --git a/lib/protocol/helpers/nor.helper.ts b/lib/protocol/helpers/nor.helper.ts new file mode 100644 index 000000000..4c11d0511 --- /dev/null +++ b/lib/protocol/helpers/nor.helper.ts @@ -0,0 +1,208 @@ +import { expect } from "chai"; +import { randomBytes } from "ethers"; + +import { certainAddress, log, trace } from "lib"; + +import { ProtocolContext, StakingModuleName } from "../types"; + +const MIN_OPS_COUNT = 3n; +const MIN_OP_KEYS_COUNT = 10n; + +const PUBKEY_LENGTH = 48n; +const SIGNATURE_LENGTH = 96n; + +export const norEnsureOperators = async ( + ctx: ProtocolContext, + minOperatorsCount = MIN_OPS_COUNT, + minOperatorKeysCount = MIN_OP_KEYS_COUNT, +) => { + await norEnsureOperatorsHaveMinKeys(ctx, minOperatorsCount, minOperatorKeysCount); + + const { nor } = ctx.contracts; + + for (let operatorId = 0n; operatorId < minOperatorsCount; operatorId++) { + const nodeOperatorBefore = await nor.getNodeOperator(operatorId, false); + + if (nodeOperatorBefore.totalVettedValidators < nodeOperatorBefore.totalAddedValidators) { + await norSetOperatorStakingLimit(ctx, { + operatorId, + limit: nodeOperatorBefore.totalAddedValidators, + }); + } + + const nodeOperatorAfter = await nor.getNodeOperator(operatorId, false); + + expect(nodeOperatorAfter.totalVettedValidators).to.equal(nodeOperatorBefore.totalAddedValidators); + } + + log.debug("Checked NOR operators count", { + "Min operators count": minOperatorsCount, + "Min keys count": minOperatorKeysCount, + }); +}; + +/** + * Fills the Nor operators with some keys to deposit in case there are not enough of them. + */ +const norEnsureOperatorsHaveMinKeys = async ( + ctx: ProtocolContext, + minOperatorsCount = MIN_OPS_COUNT, + minKeysCount = MIN_OP_KEYS_COUNT, +) => { + await norEnsureMinOperators(ctx, minOperatorsCount); + + const { nor } = ctx.contracts; + + for (let operatorId = 0n; operatorId < minOperatorsCount; operatorId++) { + const keysCount = await nor.getTotalSigningKeyCount(operatorId); + + if (keysCount < minKeysCount) { + await norAddOperatorKeys(ctx, { + operatorId, + keysToAdd: minKeysCount - keysCount, + }); + } + + const keysCountAfter = await nor.getTotalSigningKeyCount(operatorId); + + expect(keysCountAfter).to.be.gte(minKeysCount); + } +}; + +/** + * Fills the NOR with some operators in case there are not enough of them. 
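+ * Registers fake operators through the agent signer until the registry holds at least minOperatorsCount entries.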
+ */ +const norEnsureMinOperators = async (ctx: ProtocolContext, minOperatorsCount = MIN_OPS_COUNT) => { + const { nor } = ctx.contracts; + + const before = await nor.getNodeOperatorsCount(); + let count = 0n; + + while (before + count < minOperatorsCount) { + const operatorId = before + count; + + const operator = { + operatorId, + name: getOperatorName("nor", operatorId), + rewardAddress: getOperatorRewardAddress("nor", operatorId), + managerAddress: getOperatorManagerAddress("nor", operatorId), + }; + + await norAddNodeOperator(ctx, operator); + count++; + } + + const after = await nor.getNodeOperatorsCount(); + + expect(after).to.equal(before + count); + expect(after).to.be.gte(minOperatorsCount); +}; + +/** + * Adds a new node operator to the NOR. + */ +export const norAddNodeOperator = async ( + ctx: ProtocolContext, + params: { + operatorId: bigint; + name: string; + rewardAddress: string; + managerAddress: string; + }, +) => { + const { nor } = ctx.contracts; + const { operatorId, name, rewardAddress, managerAddress } = params; + + log.warning(`Adding fake NOR operator ${operatorId}`); + + const agentSigner = await ctx.getSigner("agent"); + + const addTx = await nor.connect(agentSigner).addNodeOperator(name, rewardAddress); + await trace("nodeOperatorRegistry.addNodeOperator", addTx); + + log.debug("Added NOR fake operator", { + "Operator ID": operatorId, + "Name": name, + "Reward address": rewardAddress, + "Manager address": managerAddress, + }); + + log.success(`Added fake NOR operator ${operatorId}`); +}; + +/** + * Adds some signing keys to the operator in the NOR. + */ +export const norAddOperatorKeys = async ( + ctx: ProtocolContext, + params: { + operatorId: bigint; + keysToAdd: bigint; + }, +) => { + const { nor } = ctx.contracts; + const { operatorId, keysToAdd } = params; + + log.warning(`Adding fake keys to NOR operator ${operatorId}`); + + const totalKeysBefore = await nor.getTotalSigningKeyCount(operatorId); + const unusedKeysBefore = await nor.getUnusedSigningKeyCount(operatorId); + + const votingSigner = await ctx.getSigner("voting"); + + const addKeysTx = await nor + .connect(votingSigner) + .addSigningKeys( + operatorId, + keysToAdd, + randomBytes(Number(keysToAdd * PUBKEY_LENGTH)), + randomBytes(Number(keysToAdd * SIGNATURE_LENGTH)), + ); + await trace("nodeOperatorRegistry.addSigningKeys", addKeysTx); + + const totalKeysAfter = await nor.getTotalSigningKeyCount(operatorId); + const unusedKeysAfter = await nor.getUnusedSigningKeyCount(operatorId); + + expect(totalKeysAfter).to.equal(totalKeysBefore + keysToAdd); + expect(unusedKeysAfter).to.equal(unusedKeysBefore + keysToAdd); + + log.debug("Added NOR fake signing keys", { + "Operator ID": operatorId, + "Keys to add": keysToAdd, + "Total keys before": totalKeysBefore, + "Total keys after": totalKeysAfter, + "Unused keys before": unusedKeysBefore, + "Unused keys after": unusedKeysAfter, + }); + + log.success(`Added fake keys to NOR operator ${operatorId}`); +}; + +/** + * Sets the staking limit for the operator. 
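+ * Uses the voting signer to call setNodeOperatorStakingLimit on the registry.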
+ */ +const norSetOperatorStakingLimit = async ( + ctx: ProtocolContext, + params: { + operatorId: bigint; + limit: bigint; + }, +) => { + const { nor } = ctx.contracts; + const { operatorId, limit } = params; + + log.warning(`Setting NOR operator ${operatorId} staking limit`); + + const votingSigner = await ctx.getSigner("voting"); + + const setLimitTx = await nor.connect(votingSigner).setNodeOperatorStakingLimit(operatorId, limit); + await trace("nodeOperatorRegistry.setNodeOperatorStakingLimit", setLimitTx); + + log.success(`Set NOR operator ${operatorId} staking limit`); +}; + +export const getOperatorName = (module: StakingModuleName, id: bigint, group: bigint = 0n) => `${module}:op-${group}-${id}`; + +export const getOperatorRewardAddress = (module: StakingModuleName, id: bigint, group: bigint = 0n) => certainAddress(`${module}:op:ra-${group}-${id}`); + +export const getOperatorManagerAddress = (module: StakingModuleName, id: bigint, group: bigint = 0n) => certainAddress(`${module}:op:ma-${group}-${id}`); diff --git a/lib/protocol/helpers/sdvt.helper.ts b/lib/protocol/helpers/sdvt.helper.ts new file mode 100644 index 000000000..9d58304fa --- /dev/null +++ b/lib/protocol/helpers/sdvt.helper.ts @@ -0,0 +1,214 @@ +import { expect } from "chai"; +import { randomBytes } from "ethers"; + +import { impersonate, log, streccak, trace } from "lib"; + +import { ether } from "../../units"; +import { ProtocolContext } from "../types"; + +import { getOperatorManagerAddress, getOperatorName, getOperatorRewardAddress } from "./nor.helper"; + +const MIN_OPS_COUNT = 3n; +const MIN_OP_KEYS_COUNT = 10n; + +const PUBKEY_LENGTH = 48n; +const SIGNATURE_LENGTH = 96n; + +const MANAGE_SIGNING_KEYS_ROLE = streccak("MANAGE_SIGNING_KEYS"); + +export const sdvtEnsureOperators = async ( + ctx: ProtocolContext, + minOperatorsCount = MIN_OPS_COUNT, + minOperatorKeysCount = MIN_OP_KEYS_COUNT, +) => { + await sdvtEnsureOperatorsHaveMinKeys(ctx, minOperatorsCount, minOperatorKeysCount); + + const { sdvt } = ctx.contracts; + + for (let operatorId = 0n; operatorId < minOperatorsCount; operatorId++) { + const nodeOperatorBefore = await sdvt.getNodeOperator(operatorId, false); + + if (nodeOperatorBefore.totalVettedValidators < nodeOperatorBefore.totalAddedValidators) { + await sdvtSetOperatorStakingLimit(ctx, { + operatorId, + limit: nodeOperatorBefore.totalAddedValidators, + }); + } + + const nodeOperatorAfter = await sdvt.getNodeOperator(operatorId, false); + + expect(nodeOperatorAfter.totalVettedValidators).to.equal(nodeOperatorBefore.totalAddedValidators); + } +}; + +/** + * Fills the Simple DVT operators with some keys to deposit in case there are not enough of them. 
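+ * Tops up each operator with fake signing keys until at least minKeysCount unused keys are available.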
+ */ +const sdvtEnsureOperatorsHaveMinKeys = async ( + ctx: ProtocolContext, + minOperatorsCount = MIN_OPS_COUNT, + minKeysCount = MIN_OP_KEYS_COUNT, +) => { + await sdvtEnsureMinOperators(ctx, minOperatorsCount); + + const { sdvt } = ctx.contracts; + + for (let operatorId = 0n; operatorId < minOperatorsCount; operatorId++) { + const unusedKeysCount = await sdvt.getUnusedSigningKeyCount(operatorId); + + if (unusedKeysCount < minKeysCount) { + log.warning(`Adding SDVT fake keys to operator ${operatorId}`); + + await sdvtAddNodeOperatorKeys(ctx, { + operatorId, + keysToAdd: minKeysCount - unusedKeysCount, + }); + } + + const unusedKeysCountAfter = await sdvt.getUnusedSigningKeyCount(operatorId); + + expect(unusedKeysCountAfter).to.be.gte(minKeysCount); + } + + log.debug("Checked SDVT operators keys count", { + "Min operators count": minOperatorsCount, + "Min keys count": minKeysCount, + }); +}; + +/** + * Fills the Simple DVT with some operators in case there are not enough of them. + */ +const sdvtEnsureMinOperators = async (ctx: ProtocolContext, minOperatorsCount = MIN_OPS_COUNT) => { + const { sdvt } = ctx.contracts; + + const before = await sdvt.getNodeOperatorsCount(); + let count = 0n; + + while (before + count < minOperatorsCount) { + const operatorId = before + count; + + const operator = { + operatorId, + name: getOperatorName("sdvt", operatorId), + rewardAddress: getOperatorRewardAddress("sdvt", operatorId), + managerAddress: getOperatorManagerAddress("sdvt", operatorId), + }; + + log.warning(`Adding SDVT fake operator ${operatorId}`); + + await sdvtAddNodeOperator(ctx, operator); + count++; + } + + const after = await sdvt.getNodeOperatorsCount(); + + expect(after).to.equal(before + count); + expect(after).to.be.gte(minOperatorsCount); + + log.debug("Checked SDVT operators count", { + "Min operators count": minOperatorsCount, + "Operators count": after, + }); +}; + +/** + * Adds a new node operator to the Simple DVT. + */ +const sdvtAddNodeOperator = async ( + ctx: ProtocolContext, + params: { + operatorId: bigint; + name: string; + rewardAddress: string; + managerAddress: string; + }, +) => { + const { sdvt, acl } = ctx.contracts; + const { operatorId, name, rewardAddress, managerAddress } = params; + + const easyTrackExecutor = await ctx.getSigner("easyTrack"); + + const addTx = await sdvt.connect(easyTrackExecutor).addNodeOperator(name, rewardAddress); + await trace("simpleDVT.addNodeOperator", addTx); + + const grantPermissionTx = await acl.connect(easyTrackExecutor).grantPermissionP( + managerAddress, + sdvt.address, + MANAGE_SIGNING_KEYS_ROLE, + // See https://legacy-docs.aragon.org/developers/tools/aragonos/reference-aragonos-3#parameter-interpretation for details + [1 << (240 + Number(operatorId))], + ); + await trace("acl.grantPermissionP", grantPermissionTx); + + log.debug("Added SDVT fake operator", { + "Operator ID": operatorId, + "Name": name, + "Reward address": rewardAddress, + "Manager address": managerAddress, + }); +}; + +/** + * Adds some signing keys to the operator in the Simple DVT. 
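+ * Impersonates the operator's reward address and submits randomly generated pubkeys and signatures.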
+ */ +const sdvtAddNodeOperatorKeys = async ( + ctx: ProtocolContext, + params: { + operatorId: bigint; + keysToAdd: bigint; + }, +) => { + const { sdvt } = ctx.contracts; + const { operatorId, keysToAdd } = params; + + const totalKeysBefore = await sdvt.getTotalSigningKeyCount(operatorId); + const unusedKeysBefore = await sdvt.getUnusedSigningKeyCount(operatorId); + const { rewardAddress } = await sdvt.getNodeOperator(operatorId, false); + + const actor = await impersonate(rewardAddress, ether("100")); + + const addKeysTx = await sdvt + .connect(actor) + .addSigningKeys( + operatorId, + keysToAdd, + randomBytes(Number(keysToAdd * PUBKEY_LENGTH)), + randomBytes(Number(keysToAdd * SIGNATURE_LENGTH)), + ); + await trace("simpleDVT.addSigningKeys", addKeysTx); + + const totalKeysAfter = await sdvt.getTotalSigningKeyCount(operatorId); + const unusedKeysAfter = await sdvt.getUnusedSigningKeyCount(operatorId); + + expect(totalKeysAfter).to.equal(totalKeysBefore + keysToAdd); + expect(unusedKeysAfter).to.equal(unusedKeysBefore + keysToAdd); + + log.debug("Added SDVT fake signing keys", { + "Operator ID": operatorId, + "Keys to add": keysToAdd, + "Total keys before": totalKeysBefore, + "Total keys after": totalKeysAfter, + "Unused keys before": unusedKeysBefore, + "Unused keys after": unusedKeysAfter, + }); +}; + +/** + * Sets the staking limit for the operator. + */ +const sdvtSetOperatorStakingLimit = async ( + ctx: ProtocolContext, + params: { + operatorId: bigint; + limit: bigint; + }, +) => { + const { sdvt } = ctx.contracts; + const { operatorId, limit } = params; + + const easyTrackExecutor = await ctx.getSigner("easyTrack"); + + const setLimitTx = await sdvt.connect(easyTrackExecutor).setNodeOperatorStakingLimit(operatorId, limit); + await trace("simpleDVT.setNodeOperatorStakingLimit", setLimitTx); +}; diff --git a/lib/protocol/helpers/staking.ts b/lib/protocol/helpers/staking.ts new file mode 100644 index 000000000..e482f3b54 --- /dev/null +++ b/lib/protocol/helpers/staking.ts @@ -0,0 +1,37 @@ +import { ether, log, trace } from "lib"; + +import { ProtocolContext } from "../types"; + +/** + * Unpauses the staking contract. 
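+ * No-op when staking is already active; otherwise resumes it via the voting signer.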
+ */ +export const unpauseStaking = async (ctx: ProtocolContext) => { + const { lido } = ctx.contracts; + if (await lido.isStakingPaused()) { + log.warning("Unpausing staking contract"); + + const votingSigner = await ctx.getSigner("voting"); + const tx = await lido.connect(votingSigner).resume(); + await trace("lido.resume", tx); + + log.success("Staking contract unpaused"); + } +}; + +export const ensureStakeLimit = async (ctx: ProtocolContext) => { + const { lido } = ctx.contracts; + + const stakeLimitInfo = await lido.getStakeLimitFullInfo(); + if (!stakeLimitInfo.isStakingLimitSet) { + log.warning("Setting staking limit"); + + const maxStakeLimit = ether("150000"); + const stakeLimitIncreasePerBlock = ether("20"); // this is an arbitrary value + + const votingSigner = await ctx.getSigner("voting"); + const tx = await lido.connect(votingSigner).setStakingLimit(maxStakeLimit, stakeLimitIncreasePerBlock); + await trace("lido.setStakingLimit", tx); + + log.success("Staking limit set"); + } +}; diff --git a/lib/protocol/helpers/withdrawal.ts b/lib/protocol/helpers/withdrawal.ts new file mode 100644 index 000000000..1b9ed67ce --- /dev/null +++ b/lib/protocol/helpers/withdrawal.ts @@ -0,0 +1,82 @@ +import { expect } from "chai"; +import { ZeroAddress } from "ethers"; + +import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; + +import { ether, log, trace, updateBalance } from "lib"; + +import { ProtocolContext } from "../types"; + +import { report } from "./accounting"; + +/** + * Unpauses the withdrawal queue contract. + */ +export const unpauseWithdrawalQueue = async (ctx: ProtocolContext) => { + const { withdrawalQueue } = ctx.contracts; + if (await withdrawalQueue.isPaused()) { + log.warning("Unpausing withdrawal queue contract"); + + const resumeRole = await withdrawalQueue.RESUME_ROLE(); + const agentSigner = await ctx.getSigner("agent"); + const agentSignerAddress = await agentSigner.getAddress(); + + await withdrawalQueue.connect(agentSigner).grantRole(resumeRole, agentSignerAddress); + + const tx = await withdrawalQueue.connect(agentSigner).resume(); + await trace("withdrawalQueue.resume", tx); + + await withdrawalQueue.connect(agentSigner).revokeRole(resumeRole, agentSignerAddress); + + log.success("Unpaused withdrawal queue contract"); + } +}; + +export const finalizeWithdrawalQueue = async ( + ctx: ProtocolContext, + stEthHolder: HardhatEthersSigner, + ethHolder: HardhatEthersSigner, +) => { + const { lido, withdrawalQueue } = ctx.contracts; + + await updateBalance(ethHolder.address, ether("1000000")); + await updateBalance(stEthHolder.address, ether("1000000")); + + const stEthHolderAmount = ether("10000"); + + // Here sendTransaction is used to validate native way of submitting ETH for stETH + const tx = await stEthHolder.sendTransaction({ to: lido.address, value: stEthHolderAmount }); + await trace("stEthHolder.sendTransaction", tx); + + const stEthHolderBalance = await lido.balanceOf(stEthHolder.address); + expect(stEthHolderBalance).to.approximately(stEthHolderAmount, 10n, "stETH balance increased"); + + let lastFinalizedRequestId = await withdrawalQueue.getLastFinalizedRequestId(); + let lastRequestId = await withdrawalQueue.getLastRequestId(); + + while (lastFinalizedRequestId != lastRequestId) { + await report(ctx); + + lastFinalizedRequestId = await withdrawalQueue.getLastFinalizedRequestId(); + lastRequestId = await withdrawalQueue.getLastRequestId(); + + log.debug("Withdrawal queue status", { + "Last finalized request ID": lastFinalizedRequestId, 
+ "Last request ID": lastRequestId, + }); + + const submitTx = await ctx.contracts.lido + .connect(ethHolder) + .submit(ZeroAddress, { value: ether("10000") }); + + await trace("lido.submit", submitTx); + } + + const submitTx = await ctx.contracts.lido + .connect(ethHolder) + .submit(ZeroAddress, { value: ether("10000") }); + + await trace("lido.submit", submitTx); + + log.success("Finalized withdrawal queue"); +}; diff --git a/lib/protocol/index.ts b/lib/protocol/index.ts new file mode 100644 index 000000000..4a5fe3563 --- /dev/null +++ b/lib/protocol/index.ts @@ -0,0 +1,2 @@ +export { getProtocolContext } from "./context"; +export type { ProtocolContext, ProtocolSigners, ProtocolContracts } from "./types"; diff --git a/lib/protocol/networks.ts b/lib/protocol/networks.ts new file mode 100644 index 000000000..4ba3a5a3f --- /dev/null +++ b/lib/protocol/networks.ts @@ -0,0 +1,100 @@ +import * as process from "node:process"; + +import { ProtocolNetworkItems } from "./types"; + +export async function parseLocalDeploymentJson() { + try { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore - file is missing out of the box, that's why we need to catch the error + return await import("../../deployed-local.json"); + } catch (e) { + throw new Error("Failed to parse deployed-local.json. Did you run scratch deploy?"); + } +} + +export class ProtocolNetworkConfig { + constructor( + public readonly env: Record, + public readonly defaults: Record, + ) { + } + + get(key: keyof ProtocolNetworkItems): string { + return process.env[this.env[key]] || this.defaults[key] || ""; + } +} + +const defaultEnv = { + locator: "LOCATOR_ADDRESS", + // signers + agentAddress: "AGENT_ADDRESS", + votingAddress: "VOTING_ADDRESS", + easyTrackAddress: "EASY_TRACK_EXECUTOR_ADDRESS", + // foundation contracts + accountingOracle: "ACCOUNTING_ORACLE_ADDRESS", + depositSecurityModule: "DEPOSIT_SECURITY_MODULE_ADDRESS", + elRewardsVault: "EL_REWARDS_VAULT_ADDRESS", + legacyOracle: "LEGACY_ORACLE_ADDRESS", + lido: "LIDO_ADDRESS", + oracleReportSanityChecker: "ORACLE_REPORT_SANITY_CHECKER_ADDRESS", + burner: "BURNER_ADDRESS", + stakingRouter: "STAKING_ROUTER_ADDRESS", + validatorsExitBusOracle: "VALIDATORS_EXIT_BUS_ORACLE_ADDRESS", + withdrawalQueue: "WITHDRAWAL_QUEUE_ADDRESS", + withdrawalVault: "WITHDRAWAL_VAULT_ADDRESS", + oracleDaemonConfig: "ORACLE_DAEMON_CONFIG_ADDRESS", + // aragon contracts + kernel: "ARAGON_KERNEL_ADDRESS", + acl: "ARAGON_ACL_ADDRESS", + // stacking modules + nor: "NODE_OPERATORS_REGISTRY_ADDRESS", + sdvt: "SIMPLE_DVT_REGISTRY_ADDRESS", + // hash consensus + hashConsensus: "HASH_CONSENSUS_ADDRESS", +} as ProtocolNetworkItems; + +const getPrefixedEnv = (prefix: string, obj: Record): Record => + Object.fromEntries( + Object.entries(obj).map(([key, value]) => [key, `${prefix}_${value}`]), + ); + +const getDefaults = (obj: Record): Record => + Object.fromEntries( + Object.entries(obj).map(([key]) => [key, ""]), + ); + +export async function getNetworkConfig(network: string): Promise { + const defaults = getDefaults(defaultEnv) as Record; + + switch (network) { + case "local": + const config = await parseLocalDeploymentJson(); + return new ProtocolNetworkConfig( + getPrefixedEnv("LOCAL", defaultEnv), + { + ...defaults, + locator: config["lidoLocator"].proxy.address, + agentAddress: config["app:aragon-agent"].proxy.address, + votingAddress: config["app:aragon-voting"].proxy.address, + // Overrides for local development + easyTrackAddress: config["app:aragon-agent"].proxy.address, + 
sdvt: config["app:node-operators-registry"].proxy.address, + }); + + case "mainnet-fork": + case "hardhat": + const env = getPrefixedEnv("MAINNET", defaultEnv); + return new ProtocolNetworkConfig(env, { + ...defaults, + locator: "0xC1d0b3DE6792Bf6b4b37EccdcC24e45978Cfd2Eb", + // https://docs.lido.fi/deployed-contracts/#dao-contracts + agentAddress: "0x3e40D73EB977Dc6a537aF587D48316feE66E9C8c", + votingAddress: "0x2e59A20f205bB85a89C53f1936454680651E618e", + // https://docs.lido.fi/deployed-contracts/#easy-track + easyTrackAddress: "0xFE5986E06210aC1eCC1aDCafc0cc7f8D63B3F977", + }); + + default: + throw new Error(`Network ${network} is not supported`); + } +} diff --git a/lib/protocol/provision.ts b/lib/protocol/provision.ts new file mode 100644 index 000000000..1c92ff716 --- /dev/null +++ b/lib/protocol/provision.ts @@ -0,0 +1,27 @@ +import { + ensureHashConsensusInitialEpoch, + ensureOracleCommitteeMembers, + ensureStakeLimit, + norEnsureOperators, + unpauseStaking, + unpauseWithdrawalQueue, +} from "./helpers"; +import { ProtocolContext } from "./types"; + +/** + * In order to make the protocol fully operational from scratch deploy, the additional steps are required: + */ +export const provision = async (ctx: ProtocolContext) => { + + await ensureHashConsensusInitialEpoch(ctx); + + await ensureOracleCommitteeMembers(ctx, 5n); + + await unpauseStaking(ctx); + + await unpauseWithdrawalQueue(ctx); + + await norEnsureOperators(ctx, 3n, 5n); + + await ensureStakeLimit(ctx); +}; diff --git a/lib/protocol/types.ts b/lib/protocol/types.ts new file mode 100644 index 000000000..192b1a3a8 --- /dev/null +++ b/lib/protocol/types.ts @@ -0,0 +1,139 @@ +import { BaseContract as EthersBaseContract, ContractTransactionReceipt, LogDescription } from "ethers"; + +import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; + +import { + AccountingOracle, + ACL, + Burner, + DepositSecurityModule, + HashConsensus, + Kernel, + LegacyOracle, + Lido, + LidoExecutionLayerRewardsVault, + LidoLocator, + NodeOperatorsRegistry, + OracleDaemonConfig, + OracleReportSanityChecker, + StakingRouter, + ValidatorsExitBusOracle, + WithdrawalQueueERC721, + WithdrawalVault, +} from "typechain-types"; + +export type ProtocolNetworkItems = { + locator: string; + // signers + agentAddress: string; + votingAddress: string; + easyTrackAddress: string; + // foundation contracts + accountingOracle: string; + depositSecurityModule: string; + elRewardsVault: string; + legacyOracle: string; + lido: string; + oracleReportSanityChecker: string; + burner: string; + stakingRouter: string; + validatorsExitBusOracle: string; + withdrawalQueue: string; + withdrawalVault: string; + oracleDaemonConfig: string; + // aragon contracts + kernel: string; + acl: string; + // stacking modules + nor: string; + sdvt: string; + // hash consensus + hashConsensus: string; +}; + +export interface ContractTypes { + LidoLocator: LidoLocator; + AccountingOracle: AccountingOracle; + DepositSecurityModule: DepositSecurityModule; + LidoExecutionLayerRewardsVault: LidoExecutionLayerRewardsVault; + LegacyOracle: LegacyOracle; + Lido: Lido; + OracleReportSanityChecker: OracleReportSanityChecker; + Burner: Burner; + StakingRouter: StakingRouter; + ValidatorsExitBusOracle: ValidatorsExitBusOracle; + WithdrawalQueueERC721: WithdrawalQueueERC721; + WithdrawalVault: WithdrawalVault; + OracleDaemonConfig: OracleDaemonConfig; + Kernel: Kernel; + ACL: ACL; + HashConsensus: HashConsensus; + NodeOperatorsRegistry: NodeOperatorsRegistry; +} + +export type 
ContractName = keyof ContractTypes; +export type ContractType<Name extends ContractName> = ContractTypes[Name]; + +export type BaseContract = EthersBaseContract; + +export type LoadedContract<T extends BaseContract = BaseContract> = T & { + address: string; +}; + +export type CoreContracts = { + accountingOracle: LoadedContract<AccountingOracle>; + depositSecurityModule: LoadedContract<DepositSecurityModule>; + elRewardsVault: LoadedContract<LidoExecutionLayerRewardsVault>; + legacyOracle: LoadedContract<LegacyOracle>; + lido: LoadedContract<Lido>; + oracleReportSanityChecker: LoadedContract<OracleReportSanityChecker>; + burner: LoadedContract<Burner>; + stakingRouter: LoadedContract<StakingRouter>; + validatorsExitBusOracle: LoadedContract<ValidatorsExitBusOracle>; + withdrawalQueue: LoadedContract<WithdrawalQueueERC721>; + withdrawalVault: LoadedContract<WithdrawalVault>; + oracleDaemonConfig: LoadedContract<OracleDaemonConfig>; +}; + +export type AragonContracts = { + kernel: LoadedContract<Kernel>; + acl: LoadedContract<ACL>; +}; + +export type StakingModuleContracts = { + nor: LoadedContract<NodeOperatorsRegistry>; + sdvt: LoadedContract<NodeOperatorsRegistry>; +}; + +export type StakingModuleName = "nor" | "sdvt"; + +export type HashConsensusContracts = { + hashConsensus: LoadedContract<HashConsensus>; +}; + +export type ProtocolContracts = { locator: LoadedContract<LidoLocator> } + & CoreContracts + & AragonContracts + & StakingModuleContracts + & HashConsensusContracts; + +export type ProtocolSigners = { + agent: string; + voting: string; + easyTrack: string; +}; + +export type Signer = keyof ProtocolSigners; + +export type ProtocolContextFlags = { + withSimpleDvtModule: boolean; +}; + +export type ProtocolContext = { + contracts: ProtocolContracts; + signers: ProtocolSigners; + interfaces: Array<BaseContract["interface"]>; + flags: ProtocolContextFlags; + getSigner: (signer: Signer, balance?: bigint) => Promise<HardhatEthersSigner>; + getEvents: (receipt: ContractTransactionReceipt, eventName: string) => LogDescription[]; +}; diff --git a/lib/time.ts index 1cb6e7a23..72358d97b 100644 --- a/lib/time.ts +++ b/lib/time.ts @@ -19,7 +19,7 @@ export function days(number: bigint): bigint { export async function getCurrentBlockTimestamp() { const blockNum = await ethers.provider.getBlockNumber(); const block = await ethers.provider.getBlock(blockNum); - return block?.timestamp ?? 0; + return BigInt(block?.timestamp ?? 0); } export async function getNextBlockTimestamp() { @@ -52,6 +52,7 @@ export function formatTimeInterval(sec: number | bigint) { if (typeof sec === "bigint") { sec = parseInt(sec.toString()); } + function floor(n: number, multiplier: number) { return Math.floor(n * multiplier) / multiplier; } diff --git a/lib/transaction.ts new file mode 100644 index 000000000..0160a7f39 --- /dev/null +++ b/lib/transaction.ts @@ -0,0 +1,42 @@ +import { + ContractTransactionReceipt, + ContractTransactionResponse, + TransactionReceipt, + TransactionResponse, +} from "ethers"; +import hre, { ethers } from "hardhat"; + +import { log } from "lib"; + +type Transaction = TransactionResponse | ContractTransactionResponse; +type Receipt = TransactionReceipt | ContractTransactionReceipt; + +export const trace = async <T extends Receipt = ContractTransactionReceipt>(name: string, tx: Transaction) => { + const receipt = await tx.wait(); + + if (!receipt) { + log.error("Failed to trace transaction: no receipt!"); + throw new Error(`Failed to trace transaction for ${name}: no receipt!`); + } + + const network = await tx.provider.getNetwork(); + const config = hre.config.networks[network.name]; + const blockGasLimit = "blockGasLimit" in config ? config.blockGasLimit : 30_000_000; + const gasUsedPercent = (Number(receipt.gasUsed) / blockGasLimit) * 100; + + log.traceTransaction(name, { + from: tx.from, + to: tx.to ??
`New contract @ ${receipt.contractAddress}`, + value: ethers.formatEther(tx.value), + gasUsed: ethers.formatUnits(receipt.gasUsed, "wei"), + gasPrice: ethers.formatUnits(receipt.gasPrice, "gwei"), + gasUsedPercent: `${gasUsedPercent.toFixed(2)}%`, + gasLimit: blockGasLimit.toString(), + nonce: tx.nonce, + blockNumber: receipt.blockNumber, + hash: receipt.hash, + status: !!receipt.status, + }); + + return receipt as T; +}; diff --git a/lib/type.ts b/lib/type.ts index d6e7f37bf..1660da4ea 100644 --- a/lib/type.ts +++ b/lib/type.ts @@ -1 +1,15 @@ export type ArrayToUnion = A[number]; + +export type TraceableTransaction = { + from: string; + to: string; + value: string; + gasUsed: string; + gasPrice: string; + gasLimit: string; + gasUsedPercent: string; + nonce: number; + blockNumber: number; + hash: string; + status: boolean; +}; diff --git a/package.json b/package.json index 0619470e6..7c42353b5 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "engines": { "node": ">=20" }, - "packageManager": "yarn@4.2.2", + "packageManager": "yarn@4.3.1", "scripts": { "compile": "hardhat compile", "lint:sol": "solhint 'contracts/**/*.sol'", @@ -15,12 +15,18 @@ "lint:ts:fix": "yarn lint:ts --fix", "lint": "yarn lint:sol && yarn lint:ts", "format": "prettier . --write", + "test": "hardhat test test/**/*.test.ts --parallel", "test:forge": "forge test", - "test": "hardhat test --parallel", - "test:sequential": "hardhat test", "test:coverage": "hardhat coverage", - "test:trace": "hardhat test --trace", - "test:watch": "hardhat watch test", + "test:sequential": "hardhat test test/**/*.test.ts", + "test:trace": "hardhat test test/**/*.test.ts --trace --disabletracer", + "test:fulltrace": "hardhat test test/**/*.test.ts --fulltrace --disabletracer", + "test:watch": "hardhat watch", + "test:integration": "hardhat test test/integration/**/*.ts --bail", + "test:integration:local": "INTEGRATION_SIMPLE_DVT_MODULE=off hardhat test test/integration/**/*.ts --network local --bail", + "test:integration:fork": "hardhat test test/integration/**/*.ts --network mainnet-fork --bail", + "test:integration:trace": "hardhat test test/integration/**/*.ts --trace --disabletracer --bail", + "test:integration:fulltrace": "hardhat test test/integration/**/*.ts --fulltrace --disabletracer --bail", "typecheck": "tsc --noEmit", "prepare": "husky" }, @@ -37,46 +43,46 @@ "@commitlint/config-conventional": "^19.2.2", "@nomicfoundation/hardhat-chai-matchers": "^2.0.7", "@nomicfoundation/hardhat-ethers": "^3.0.6", - "@nomicfoundation/hardhat-ignition": "^0.15.4", - "@nomicfoundation/hardhat-ignition-ethers": "^0.15.4", + "@nomicfoundation/hardhat-ignition": "^0.15.5", + "@nomicfoundation/hardhat-ignition-ethers": "^0.15.5", "@nomicfoundation/hardhat-network-helpers": "^1.0.11", "@nomicfoundation/hardhat-toolbox": "^5.0.0", - "@nomicfoundation/hardhat-verify": "^2.0.8", - "@nomicfoundation/ignition-core": "^0.15.4", + "@nomicfoundation/hardhat-verify": "^2.0.9", + "@nomicfoundation/ignition-core": "^0.15.5", "@typechain/ethers-v6": "^0.5.1", "@typechain/hardhat": "^9.1.0", "@types/chai": "^4.3.16", - "@types/mocha": "10.0.6", - "@types/node": "20.14.2", - "@typescript-eslint/eslint-plugin": "^7.12.0", - "@typescript-eslint/parser": "^7.12.0", + "@types/mocha": "10.0.7", + "@types/node": "20.14.13", + "@typescript-eslint/eslint-plugin": "^7.18.0", + "@typescript-eslint/parser": "^7.18.0", "bigint-conversion": "^2.4.3", - "chai": "^4.4.1", + "chai": "^4.5.0", "chalk": "^4.1.2", "dotenv": "^16.4.5", "eslint": "^8.57.0", 
"eslint-config-prettier": "^9.1.0", "eslint-plugin-no-only-tests": "^3.1.0", - "eslint-plugin-simple-import-sort": "12.1.0", + "eslint-plugin-simple-import-sort": "12.1.1", "ethereumjs-util": "^7.1.5", - "ethers": "^6.13.0", - "glob": "^10.4.1", - "hardhat": "^2.22.5", + "ethers": "^6.13.2", + "glob": "^10.4.5", + "hardhat": "^2.22.7", "hardhat-contract-sizer": "^2.10.0", "hardhat-gas-reporter": "^1.0.10", "hardhat-ignore-warnings": "^0.2.11", - "hardhat-tracer": "3.0.1", + "hardhat-tracer": "3.0.3", "hardhat-watcher": "2.5.0", - "husky": "^9.0.11", - "lint-staged": "^15.2.5", - "prettier": "^3.3.1", - "solhint": "^5.0.1", + "husky": "^9.1.4", + "lint-staged": "^15.2.7", + "prettier": "^3.3.3", + "solhint": "^5.0.2", "solhint-plugin-lido": "^0.0.4", "solidity-coverage": "^0.8.12", "ts-node": "^10.9.2", "tsconfig-paths": "^4.2.0", "typechain": "^8.3.2", - "typescript": "^5.4.5" + "typescript": "^5.5.4" }, "dependencies": { "@aragon/apps-agent": "2.1.0", diff --git a/scripts/scratch/steps/00-populate-deploy-artifact-from-env.ts b/scripts/scratch/steps/00-populate-deploy-artifact-from-env.ts index dd5c2f326..00fea43d5 100644 --- a/scripts/scratch/steps/00-populate-deploy-artifact-from-env.ts +++ b/scripts/scratch/steps/00-populate-deploy-artifact-from-env.ts @@ -1,8 +1,7 @@ import { ethers } from "hardhat"; -import { log, Sk } from "lib"; - -import { persistNetworkState, readNetworkState } from "../../../lib/state-file"; +import { log } from "lib"; +import { persistNetworkState, readNetworkState, Sk } from "lib/state-file"; function getEnvVariable(name: string, defaultValue?: string) { const value = process.env[name]; diff --git a/scripts/scratch/steps/02-deploy-aragon-env.ts b/scripts/scratch/steps/02-deploy-aragon-env.ts index 1a3af97fa..2de97bd50 100644 --- a/scripts/scratch/steps/02-deploy-aragon-env.ts +++ b/scripts/scratch/steps/02-deploy-aragon-env.ts @@ -9,7 +9,7 @@ import { deployImplementation, deployWithoutProxy, makeTx } from "lib/deploy"; import { assignENSName } from "lib/ens"; import { findEvents } from "lib/event"; import { streccak } from "lib/keccak"; -import { log, logSplitter } from "lib/log"; +import { log } from "lib/log"; import { readNetworkState, Sk, updateObjectInState } from "lib/state-file"; async function main() { @@ -114,7 +114,7 @@ async function main() { async function deployAPM(owner: string, labelName: string, ens: ENS, apmRegistryFactory: LoadedContract) { log(`Deploying APM for node ${labelName}.eth...`); - logSplitter(); + log.splitter(); const { parentNode, labelHash, nodeName, node } = await assignENSName( "eth", labelName, @@ -124,12 +124,12 @@ async function deployAPM(owner: string, labelName: string, ens: ENS, apmRegistry "APMRegistryFactory", ); - logSplitter(); + log.splitter(); log(`Using APMRegistryFactory: ${chalk.yellow(apmRegistryFactory.address)}`); const receipt = await makeTx(apmRegistryFactory, "newAPM", [parentNode, labelHash, owner], { from: owner }); const apmAddress = findEvents(receipt, "DeployAPM")[0].args.apm; log(`APMRegistry address: ${chalk.yellow(apmAddress)}`); - logSplitter(); + log.splitter(); const apmRegistry = await getContractAt("APMRegistry", apmAddress); @@ -152,10 +152,10 @@ async function deployAragonID(owner: string, ens: ENS) { const fifsResolvingRegistrarArgs = [await ens.getAddress(), publicResolverAddress, node]; const aragonID = await deployWithoutProxy(Sk.aragonId, "FIFSResolvingRegistrar", owner, fifsResolvingRegistrarArgs); - logSplitter(); + log.splitter(); await assignENSName("eth", "aragonid", owner, ens, 
aragonID.address, "AragonID"); - logSplitter(); + log.splitter(); await makeTx(aragonID, "register", [streccak("owner"), owner], { from: owner }); return aragonID; diff --git a/scripts/scratch/steps/05-deploy-apm.ts b/scripts/scratch/steps/05-deploy-apm.ts index 8adebe415..0f5300c47 100644 --- a/scripts/scratch/steps/05-deploy-apm.ts +++ b/scripts/scratch/steps/05-deploy-apm.ts @@ -9,7 +9,7 @@ import { makeTx } from "lib/deploy"; import { getENSNodeOwner } from "lib/ens"; import { findEvents } from "lib/event"; import { streccak } from "lib/keccak"; -import { log, logSplitter, yl } from "lib/log"; +import { log, yl } from "lib/log"; import { readNetworkState, Sk, updateObjectInState } from "lib/state-file"; async function main() { @@ -19,7 +19,7 @@ async function main() { let state = readNetworkState({ deployer }); const templateAddress = state.lidoTemplate.address; - logSplitter(); + log.splitter(); log(`APM ENS domain: ${chalk.yellow(state.lidoApmEnsName)}`); log(`Using DAO template: ${chalk.yellow(templateAddress)}`); @@ -31,7 +31,7 @@ async function main() { assert.equal(lidoApmEnsNodeOwner, templateAddress, checkDesc); log.success(checkDesc); - logSplitter(); + log.splitter(); const domain = splitDomain(state.lidoApmEnsName); const parentHash = ethers.namehash(domain.parent); @@ -40,7 +40,7 @@ async function main() { log(`Parent domain: ${chalk.yellow(domain.parent)} ${parentHash}`); log(`Subdomain label: ${chalk.yellow(domain.sub)} ${subHash}`); - logSplitter(); + log.splitter(); const template = await loadContract(LidoTemplate__factory, templateAddress); const lidoApmDeployArguments = [parentHash, subHash]; diff --git a/scripts/scratch/steps/06-create-app-repos.ts b/scripts/scratch/steps/06-create-app-repos.ts index 8d37cc617..c34c7b464 100644 --- a/scripts/scratch/steps/06-create-app-repos.ts +++ b/scripts/scratch/steps/06-create-app-repos.ts @@ -2,7 +2,7 @@ import { ethers } from "hardhat"; import { getContractAt } from "lib/contract"; import { makeTx } from "lib/deploy"; -import { log, logSplitter } from "lib/log"; +import { log } from "lib/log"; import { readNetworkState, setValueInState, Sk } from "lib/state-file"; const NULL_CONTENT_URI = @@ -13,7 +13,7 @@ async function main() { const deployer = (await ethers.provider.getSigner()).address; const state = readNetworkState({ deployer }); - logSplitter(); + log.splitter(); const template = await getContractAt("LidoTemplate", state[Sk.lidoTemplate].address); const createReposArguments = [ diff --git a/scripts/scratch/steps/09-deploy-non-aragon-contracts.ts b/scripts/scratch/steps/09-deploy-non-aragon-contracts.ts index fae746433..0407a852a 100644 --- a/scripts/scratch/steps/09-deploy-non-aragon-contracts.ts +++ b/scripts/scratch/steps/09-deploy-non-aragon-contracts.ts @@ -8,7 +8,7 @@ import { deployWithoutProxy, updateProxyImplementation, } from "lib/deploy"; -import { log, logWideSplitter } from "lib/log"; +import { log } from "lib/log"; import { readNetworkState, Sk, updateObjectInState } from "lib/state-file"; async function main() { @@ -32,7 +32,7 @@ async function main() { const admin = deployer; const sanityChecks = state["oracleReportSanityChecker"].deployParameters; - logWideSplitter(); + log.wideSplitter(); if (!chainSpec.depositContract) { throw new Error(`please specify deposit contract address in state file at /chainSpec/depositContract`); @@ -49,7 +49,7 @@ async function main() { deployer, oracleDaemonConfigArgs, ); - logWideSplitter(); + log.wideSplitter(); // // === DummyEmptyContract === @@ -67,7 +67,7 @@ async 
function main() { [], dummyContract.address, ); - logWideSplitter(); + log.wideSplitter(); // // === OracleReportSanityChecker === @@ -94,19 +94,19 @@ async function main() { deployer, oracleReportSanityCheckerArgs, ); - logWideSplitter(); + log.wideSplitter(); // // === EIP712StETH === // await deployWithoutProxy(Sk.eip712StETH, "EIP712StETH", deployer, [lidoAddress]); - logWideSplitter(); + log.wideSplitter(); // // === WstETH === // const wstETH = await deployWithoutProxy(Sk.wstETH, "WstETH", deployer, [lidoAddress]); - logWideSplitter(); + log.wideSplitter(); // // === WithdrawalQueueERC721 === @@ -123,7 +123,7 @@ async function main() { deployer, withdrawalQueueERC721Args, ); - logWideSplitter(); + log.wideSplitter(); // // === WithdrawalVault === @@ -148,7 +148,7 @@ async function main() { }, address: withdrawalsManagerProxy.address, }); - logWideSplitter(); + log.wideSplitter(); // // === LidoExecutionLayerRewardsVault === @@ -159,7 +159,7 @@ async function main() { deployer, [lidoAddress, treasuryAddress], ); - logWideSplitter(); + log.wideSplitter(); // // === StakingRouter === @@ -195,7 +195,7 @@ async function main() { `NB: skipping deployment of DepositSecurityModule - using the predefined address ${depositSecurityModuleAddress} instead`, ); } - logWideSplitter(); + log.wideSplitter(); // // === AccountingOracle === @@ -214,7 +214,7 @@ async function main() { deployer, accountingOracleArgs, ); - logWideSplitter(); + log.wideSplitter(); // // === HashConsensus for AccountingOracle === @@ -234,7 +234,7 @@ async function main() { deployer, hashConsensusForAccountingArgs, ); - logWideSplitter(); + log.wideSplitter(); // // === ValidatorsExitBusOracle === @@ -247,7 +247,7 @@ async function main() { deployer, validatorsExitBusOracleArgs, ); - logWideSplitter(); + log.wideSplitter(); // // === HashConsensus for ValidatorsExitBusOracle === @@ -267,7 +267,7 @@ async function main() { deployer, hashConsensusForExitBusArgs, ); - logWideSplitter(); + log.wideSplitter(); // // === Burner === @@ -280,7 +280,7 @@ async function main() { burnerParams.totalNonCoverSharesBurnt, ]; const burner = await deployWithoutProxy(Sk.burner, "Burner", deployer, burnerArgs); - logWideSplitter(); + log.wideSplitter(); // // === LidoLocator: update to valid implementation === diff --git a/scripts/scratch/steps/12-initialize-non-aragon-contracts.ts b/scripts/scratch/steps/12-initialize-non-aragon-contracts.ts index 271d1dbcb..6d933281c 100644 --- a/scripts/scratch/steps/12-initialize-non-aragon-contracts.ts +++ b/scripts/scratch/steps/12-initialize-non-aragon-contracts.ts @@ -2,7 +2,7 @@ import { ethers } from "hardhat"; import { getContractAt } from "lib/contract"; import { makeTx } from "lib/deploy"; -import { log, logWideSplitter } from "lib/log"; +import { log } from "lib/log"; import { readNetworkState, Sk } from "lib/state-file"; import { en0x } from "lib/string"; @@ -59,7 +59,7 @@ async function main() { const bootstrapInitBalance = 10n; // wei const lido = await getContractAt("Lido", lidoAddress); await makeTx(lido, "initialize", lidoInitArgs, { value: bootstrapInitBalance, from: deployer }); - logWideSplitter(); + log.wideSplitter(); // // === LegacyOracle: initialize === @@ -126,7 +126,7 @@ async function main() { ]; const stakingRouter = await getContractAt("StakingRouter", stakingRouterAddress); await makeTx(stakingRouter, "initialize", stakingRouterArgs, { from: deployer }); - logWideSplitter(); + log.wideSplitter(); // // === OracleDaemonConfig: set parameters === diff --git 
a/scripts/scratch/steps/13-grant-roles.ts b/scripts/scratch/steps/13-grant-roles.ts index b9c489973..dd17ff5b3 100644 --- a/scripts/scratch/steps/13-grant-roles.ts +++ b/scripts/scratch/steps/13-grant-roles.ts @@ -2,7 +2,7 @@ import { ethers } from "hardhat"; import { getContractAt } from "lib/contract"; import { makeTx } from "lib/deploy"; -import { log, logWideSplitter } from "lib/log"; +import { log } from "lib/log"; import { readNetworkState, Sk } from "lib/state-file"; async function main() { @@ -49,7 +49,7 @@ async function main() { [await stakingRouter.getFunction("REPORT_REWARDS_MINTED_ROLE")(), lidoAddress], { from: deployer }, ); - logWideSplitter(); + log.wideSplitter(); // // === ValidatorsExitBusOracle @@ -62,7 +62,7 @@ async function main() { [await validatorsExitBusOracle.getFunction("PAUSE_ROLE")(), gateSealAddress], { from: deployer }, ); - logWideSplitter(); + log.wideSplitter(); } else { log(`GateSeal is not specified or deployed: skipping assigning PAUSE_ROLE of validatorsExitBusOracle`); } @@ -87,7 +87,7 @@ async function main() { [await withdrawalQueue.getFunction("ORACLE_ROLE")(), accountingOracleAddress], { from: deployer }, ); - logWideSplitter(); + log.wideSplitter(); // // === Burner diff --git a/scripts/upgrade/deploy-locator.ts b/scripts/upgrade/deploy-locator.ts index b1ed107a5..614bce889 100644 --- a/scripts/upgrade/deploy-locator.ts +++ b/scripts/upgrade/deploy-locator.ts @@ -23,6 +23,7 @@ const VIEW_NAMES_AND_CTOR_ARGS = [ /////////////// GLOBAL VARIABLES /////////////// const g_newAddresses: { [key: string]: string } = {}; + /////////////// GLOBAL VARIABLES /////////////// async function getNewFromEnvOrCurrent(name: string, locator: LoadedContract) { diff --git a/test/0.4.24/lib/signingKeys.test.ts b/test/0.4.24/lib/signingKeys.test.ts index 2ad0e74c0..3f9ecdf8b 100644 --- a/test/0.4.24/lib/signingKeys.test.ts +++ b/test/0.4.24/lib/signingKeys.test.ts @@ -296,13 +296,13 @@ describe("SigningKeys.sol", () => { // @note This also tests the `initKeysSigsBuf` function, because they are related context("loadKeysSigs", () => { it("Loads the keys and signatures correctly", async () => { - const [publicKeys, signatures] = firstNodeOperatorKeys.slice(); + const [publicKeys, keySignatures] = firstNodeOperatorKeys.slice(); await signingKeys.saveKeysSigs( firstNodeOperatorId, firstNodeOperatorStartIndex, firstNodeOperatorKeys.count, publicKeys, - signatures, + keySignatures, ); for (let i = 0; i < firstNodeOperatorKeys.count; ++i) { diff --git a/test/0.4.24/lib/stakeLimitUtils.test.ts b/test/0.4.24/lib/stakeLimitUtils.test.ts index fd4860422..6506a26df 100644 --- a/test/0.4.24/lib/stakeLimitUtils.test.ts +++ b/test/0.4.24/lib/stakeLimitUtils.test.ts @@ -213,7 +213,7 @@ describe("StakeLimitUtils.sol", () => { it("the full limit gets restored after growth blocks", async () => { prevStakeBlockNumber = BigInt(await latestBlock()); - const prevStakeLimit = 0n; + const baseStakeLimit = 0n; await stakeLimitUtils.harness_setState(prevStakeBlockNumber, 0n, maxStakeLimitGrowthBlocks, maxStakeLimit); // 1 block passed due to the setter call above expect(await stakeLimitUtils.calculateCurrentStakeLimit()).to.equal( @@ -223,7 +223,7 @@ describe("StakeLimitUtils.sol", () => { // growth blocks passed (might be not equal to maxStakeLimit yet due to rounding) await mineUpTo(BigInt(prevStakeBlockNumber) + maxStakeLimitGrowthBlocks); expect(await stakeLimitUtils.calculateCurrentStakeLimit()).to.equal( - prevStakeLimit + maxStakeLimitGrowthBlocks * (maxStakeLimit / 
maxStakeLimitGrowthBlocks), + baseStakeLimit + maxStakeLimitGrowthBlocks * (maxStakeLimit / maxStakeLimitGrowthBlocks), ); // move forward one more block to account for rounding and reach max @@ -294,9 +294,9 @@ describe("StakeLimitUtils.sol", () => { context("reset prev stake limit cases", () => { it("staking was paused", async () => { - const prevStakeBlockNumber = 0n; // staking is paused + const baseStakeBlockNumber = 0n; // staking is paused await stakeLimitUtils.harness_setState( - prevStakeBlockNumber, + baseStakeBlockNumber, prevStakeLimit, maxStakeLimitGrowthBlocks, maxStakeLimit, @@ -308,7 +308,7 @@ describe("StakeLimitUtils.sol", () => { const state = await stakeLimitUtils.harness_getState(); - expect(state.prevStakeBlockNumber).to.equal(prevStakeBlockNumber); + expect(state.prevStakeBlockNumber).to.equal(baseStakeBlockNumber); expect(state.maxStakeLimit).to.equal(maxStakeLimit); expect(state.maxStakeLimitGrowthBlocks).to.equal(maxStakeLimitGrowthBlocks); // prev stake limit reset @@ -316,12 +316,12 @@ describe("StakeLimitUtils.sol", () => { }); it("staking was unlimited", async () => { - const maxStakeLimit = 0n; // staking is unlimited + const noStakeLimit = 0n; // staking is unlimited await stakeLimitUtils.harness_setState( prevStakeBlockNumber, prevStakeLimit, maxStakeLimitGrowthBlocks, - maxStakeLimit, + noStakeLimit, ); const updatedMaxStakeLimit = 10n ** 18n; @@ -417,11 +417,11 @@ describe("StakeLimitUtils.sol", () => { await expect(stakeLimitUtils.updatePrevStakeLimit(updatedValue)) .to.emit(stakeLimitUtils, "PrevStakeLimitUpdated") .withArgs(updatedValue); - const prevStakeBlockNumber = await latestBlock(); + const stakeBlockNumber = await latestBlock(); const state = await stakeLimitUtils.harness_getState(); - expect(state.prevStakeBlockNumber).to.equal(prevStakeBlockNumber); + expect(state.prevStakeBlockNumber).to.equal(stakeBlockNumber); expect(state.prevStakeLimit).to.equal(updatedValue); expect(state.maxStakeLimit).to.equal(maxStakeLimit); expect(state.maxStakeLimitGrowthBlocks).to.equal(maxStakeLimitGrowthBlocks); diff --git a/test/0.4.24/lido/lido.handleOracleReport.test.ts b/test/0.4.24/lido/lido.handleOracleReport.test.ts index 8861c7e06..e56ee5b05 100644 --- a/test/0.4.24/lido/lido.handleOracleReport.test.ts +++ b/test/0.4.24/lido/lido.handleOracleReport.test.ts @@ -579,9 +579,9 @@ describe("Lido:report", () => { expect(await locator.postTokenRebaseReceiver()).to.equal(ZeroAddress); const accountingOracleAddress = await locator.accountingOracle(); - const accountingOracle = await impersonate(accountingOracleAddress, ether("1000.0")); + const accountingOracleSigner = await impersonate(accountingOracleAddress, ether("1000.0")); - await expect(lido.connect(accountingOracle).handleOracleReport(...report())).not.to.emit( + await expect(lido.connect(accountingOracleSigner).handleOracleReport(...report())).not.to.emit( postTokenRebaseReceiver, "Mock__PostTokenRebaseHandled", ); diff --git a/test/0.4.24/oracle/legacyOracle.test.ts b/test/0.4.24/oracle/legacyOracle.test.ts index 9600c56cd..6fe6b902d 100644 --- a/test/0.4.24/oracle/legacyOracle.test.ts +++ b/test/0.4.24/oracle/legacyOracle.test.ts @@ -264,10 +264,10 @@ describe("LegacyOracle.sol", () => { const accountingOracleAddress = await accountingOracle.getAddress(); const accountingOracleActor = await impersonate(accountingOracleAddress, ether("1000")); - const refSlot = 0n; - const expectedEpochId = (refSlot + 1n) / SLOTS_PER_EPOCH; + const baseRefSlot = 0n; + const expectedEpochId = (baseRefSlot + 1n) / 
SLOTS_PER_EPOCH; - await expect(legacyOracle.connect(accountingOracleActor).handleConsensusLayerReport(refSlot, 0, 0)) + await expect(legacyOracle.connect(accountingOracleActor).handleConsensusLayerReport(baseRefSlot, 0, 0)) .to.emit(legacyOracle, "Completed") .withArgs(expectedEpochId, 0, 0); @@ -326,13 +326,17 @@ describe("LegacyOracle.sol", () => { initialFastLaneLengthSlots, ]); - const accountingOracle = await ethers.deployContract("AccountingOracle__MockForLegacyOracle", [ + const accountingOracleMock = await ethers.deployContract("AccountingOracle__MockForLegacyOracle", [ lido, invalidConsensusContract, secondsPerSlot, ]); - const locatorConfig = { legacyOracle, accountingOracle, lido }; + const locatorConfig = { + lido, + legacyOracle, + accountingOracle: accountingOracleMock, + }; const invalidLocator = await deployLidoLocator(locatorConfig, admin); return { invalidLocator, invalidConsensusContract }; diff --git a/test/0.4.24/stethPermit.test.ts b/test/0.4.24/stethPermit.test.ts index 2c733e245..1c0f73ff8 100644 --- a/test/0.4.24/stethPermit.test.ts +++ b/test/0.4.24/stethPermit.test.ts @@ -16,7 +16,7 @@ import { Snapshot } from "test/suite"; describe("Permit", () => { let deployer: Signer; - let owner: Signer; + let signer: Signer; let originalState: string; let permit: Permit; @@ -25,23 +25,23 @@ describe("Permit", () => { let steth: StethPermitMockWithEip712Initialization; before(async () => { - [deployer, owner] = await ethers.getSigners(); + [deployer, signer] = await ethers.getSigners(); - steth = await new StethPermitMockWithEip712Initialization__factory(deployer).deploy(owner, { + steth = await new StethPermitMockWithEip712Initialization__factory(deployer).deploy(signer, { value: ether("10.0"), }); - const holderBalance = await steth.balanceOf(owner); + const holderBalance = await steth.balanceOf(signer); permit = { - owner: await owner.getAddress(), + owner: await signer.getAddress(), spender: certainAddress("spender"), value: holderBalance, - nonce: await steth.nonces(owner), + nonce: await steth.nonces(signer), deadline: BigInt(await time.latest()) + days(7n), }; - signature = await signPermit(await stethDomain(steth), permit, owner); + signature = await signPermit(await stethDomain(steth), permit, signer); }); beforeEach(async () => (originalState = await Snapshot.take())); diff --git a/test/0.6.12/wsteth.erc20.test.ts b/test/0.6.12/wsteth.erc20.test.ts index 649832349..a500c65d5 100644 --- a/test/0.6.12/wsteth.erc20.test.ts +++ b/test/0.6.12/wsteth.erc20.test.ts @@ -1,7 +1,6 @@ import { ethers } from "hardhat"; -import { WstETH__factory } from "typechain-types"; -import { Steth__MinimalMock__factory } from "typechain-types"; +import { Steth__MinimalMock__factory, WstETH__factory } from "typechain-types"; import { ether } from "lib/units"; diff --git a/test/0.8.4/erc1967proxy.test.ts b/test/0.8.4/erc1967proxy.test.ts index 6f057cf72..f3cbf0bbc 100644 --- a/test/0.8.4/erc1967proxy.test.ts +++ b/test/0.8.4/erc1967proxy.test.ts @@ -6,9 +6,12 @@ import { ethers } from "hardhat"; import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; import { getStorageAt } from "@nomicfoundation/hardhat-network-helpers"; -import { ERC1967Proxy__Harness, ERC1967Proxy__Harness__factory } from "typechain-types"; -import { Impl__MockForERC1967Proxy__factory } from "typechain-types/factories/test/0.8.4/contracts/Impl__MockForERC1967Proxy__factory"; -import { Impl__MockForERC1967Proxy } from "typechain-types/test/0.8.4/contracts/Impl__MockForERC1967Proxy"; +import { + 
ERC1967Proxy__Harness, + ERC1967Proxy__Harness__factory, + Impl__MockForERC1967Proxy, + Impl__MockForERC1967Proxy__factory, +} from "typechain-types"; import { certainAddress } from "lib"; diff --git a/test/0.8.4/proxy.test.ts b/test/0.8.4/proxy.test.ts index 921831539..b42bc3463 100644 --- a/test/0.8.4/proxy.test.ts +++ b/test/0.8.4/proxy.test.ts @@ -6,10 +6,12 @@ import { ethers } from "hardhat"; import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; import { getStorageAt } from "@nomicfoundation/hardhat-network-helpers"; -import { Impl__MockForERC1967Proxy__factory } from "typechain-types/factories/test/0.8.4/contracts/Impl__MockForERC1967Proxy__factory"; -import { Proxy__Harness__factory } from "typechain-types/factories/test/0.8.4/contracts/Proxy__Harness__factory"; -import { Impl__MockForERC1967Proxy } from "typechain-types/test/0.8.4/contracts/Impl__MockForERC1967Proxy"; -import { Proxy__Harness } from "typechain-types/test/0.8.4/contracts/Proxy__Harness"; +import { + Impl__MockForERC1967Proxy, + Impl__MockForERC1967Proxy__factory, + Proxy__Harness, + Proxy__Harness__factory, +} from "typechain-types"; import { ether } from "lib"; diff --git a/test/0.8.9/burner.test.ts b/test/0.8.9/burner.test.ts index c8b137928..df37947b3 100644 --- a/test/0.8.9/burner.test.ts +++ b/test/0.8.9/burner.test.ts @@ -61,18 +61,18 @@ describe("Burner", () => { }); it("Sets shares burnt to non-zero values", async () => { - const coverSharesBurnt = 1n; - const nonCoverSharesBurnt = 3n; + const differentCoverSharesBurnt = 1n; + const differentNonCoverSharesBurntNonZero = 3n; burner = await new Burner__factory(deployer).deploy( admin, treasury, steth, - coverSharesBurnt, - nonCoverSharesBurnt, + differentCoverSharesBurnt, + differentNonCoverSharesBurntNonZero, ); - expect(await burner.getCoverSharesBurnt()).to.equal(coverSharesBurnt); - expect(await burner.getNonCoverSharesBurnt()).to.equal(nonCoverSharesBurnt); + expect(await burner.getCoverSharesBurnt()).to.equal(differentCoverSharesBurnt); + expect(await burner.getNonCoverSharesBurnt()).to.equal(differentNonCoverSharesBurntNonZero); }); it("Reverts if admin is zero address", async () => { diff --git a/test/0.8.9/depositSecurityModule.test.ts b/test/0.8.9/depositSecurityModule.test.ts index 4ed6ef1df..47f5a36f8 100644 --- a/test/0.8.9/depositSecurityModule.test.ts +++ b/test/0.8.9/depositSecurityModule.test.ts @@ -125,13 +125,13 @@ describe("DepositSecurityModule.sol", () => { }); context("constructor", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `lido` is zero address", async () => { @@ -208,14 +208,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `setOwner`", () => { - let originalState: string; + let originalContextState: string; before(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); after(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `newValue` is zero address", async () => { @@ -249,14 +249,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `setPauseIntentValidityPeriodBlocks`", () => { - let originalState: string; + let originalContextState: string; before(async () 
=> { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); after(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `newValue` is zero parameter", async () => { @@ -289,14 +289,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `setMaxDeposits`", () => { - let originalState: string; + let originalContextState: string; before(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); after(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `setMaxDeposits` called by not an owner", async () => { @@ -325,14 +325,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `setMinDepositBlockDistance`", () => { - let originalState: string; + let originalContextState: string; before(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); after(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `setMinDepositBlockDistance` called by not an owner", async () => { @@ -374,15 +374,15 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `setGuardianQuorum`", () => { - let originalState: string; + let originalContextState: string; const guardianQuorum = 1; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `setGuardianQuorum` called by not an owner", async () => { @@ -427,14 +427,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `isGuardian`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Returns false if list of guardians is empty", async () => { @@ -460,14 +460,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `getGuardianIndex`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Returns -1 if list of guardians is empty", async () => { @@ -492,14 +492,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `addGuardian`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `addGuardian` called by not an owner", async () => { @@ -555,14 +555,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `addGuardians`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => 
{ - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `addGuardians` called by not an owner", async () => { @@ -604,14 +604,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `removeGuardian`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if the `removeGuardian` called by not an owner", async () => { @@ -695,16 +695,16 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `pauseDeposits`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); await dsm.addGuardians([guardian1, guardian2], 0); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if staking module is unregistered and fires `StakingModuleUnregistered` event on StakingRouter contract", async () => { @@ -854,10 +854,10 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `unpauseDeposits`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); await dsm.addGuardians([guardian1, guardian2], 0); @@ -875,7 +875,7 @@ describe("DepositSecurityModule.sol", () => { }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Reverts if called by not an owner", async () => { @@ -927,14 +927,14 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `canDeposit`", () => { - let originalState: string; + let originalContextState: string; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); it("Returns `false` if staking module is unregistered in StakingRouter", async () => { @@ -1038,12 +1038,12 @@ describe("DepositSecurityModule.sol", () => { }); context("Function `depositBufferedEther`", () => { - let originalState: string; + let originalContextState: string; let validAttestMessage: DSMAttestMessage; let block: Block; beforeEach(async () => { - originalState = await Snapshot.take(); + originalContextState = await Snapshot.take(); block = await getLatestBlock(); await stakingRouter.setStakingModuleNonce(DEPOSIT_NONCE); @@ -1059,7 +1059,7 @@ describe("DepositSecurityModule.sol", () => { }); afterEach(async () => { - await Snapshot.restore(originalState); + await Snapshot.restore(originalContextState); }); context("Total guardians: 0, quorum: 0", () => { diff --git a/test/0.8.9/lidoLocator.test.ts b/test/0.8.9/lidoLocator.test.ts index 280642789..f970de0c0 100644 --- a/test/0.8.9/lidoLocator.test.ts +++ b/test/0.8.9/lidoLocator.test.ts @@ -44,10 +44,10 @@ describe("LidoLocator.sol", () => { context("constructor", () => { for (const service of services) { it(`Reverts if the \`config.${service}\` is zero address`, async () => { - const config = randomConfig(); - config[service] = ZeroAddress; + const randomConfiguration = randomConfig(); + 
randomConfiguration[service] = ZeroAddress; - await expect(ethers.deployContract("LidoLocator", [config])).to.be.revertedWithCustomError( + await expect(ethers.deployContract("LidoLocator", [randomConfiguration])).to.be.revertedWithCustomError( locator, "ZeroAddress", ); diff --git a/test/0.8.9/oracle/accountingOracle.submitReport.test.ts b/test/0.8.9/oracle/accountingOracle.submitReport.test.ts index 4eaec504a..aec45b239 100644 --- a/test/0.8.9/oracle/accountingOracle.submitReport.test.ts +++ b/test/0.8.9/oracle/accountingOracle.submitReport.test.ts @@ -130,15 +130,15 @@ describe("AccountingOracle.sol:submitReport", () => { await consensus.setTime(deadline); const newReportItems = getReportDataItems(newReportFields); - const reportHash = calcReportDataHash(newReportItems); + const nextReportHash = calcReportDataHash(newReportItems); await consensus.advanceTimeToNextFrameStart(); - await consensus.connect(member1).submitReport(newReportFields.refSlot, reportHash, CONSENSUS_VERSION); + await consensus.connect(member1).submitReport(newReportFields.refSlot, nextReportHash, CONSENSUS_VERSION); return { newReportFields, newReportItems, - reportHash, + reportHash: nextReportHash, }; } @@ -553,30 +553,30 @@ describe("AccountingOracle.sol:submitReport", () => { it("should revert on non-empty format but zero length", async () => { await consensus.setTime(deadline); const { refSlot } = await consensus.getCurrentFrame(); - const reportFields = getReportFields({ + const newReportFields = getReportFields({ refSlot: refSlot, extraDataItemsCount: 0, }); - const reportItems = getReportDataItems(reportFields); - const reportHash = calcReportDataHash(reportItems); - await consensus.connect(member1).submitReport(refSlot, reportHash, CONSENSUS_VERSION); + const newReportItems = getReportDataItems(newReportFields); + const newReportHash = calcReportDataHash(newReportItems); + await consensus.connect(member1).submitReport(refSlot, newReportHash, CONSENSUS_VERSION); await expect( - oracle.connect(member1).submitReportData(reportFields, oracleVersion), + oracle.connect(member1).submitReportData(newReportFields, oracleVersion), ).to.be.revertedWithCustomError(oracle, "ExtraDataItemsCountCannotBeZeroForNonEmptyData"); }); it("should revert on non-empty format but zero hash", async () => { await consensus.setTime(deadline); const { refSlot } = await consensus.getCurrentFrame(); - const reportFields = getReportFields({ + const newReportFields = getReportFields({ refSlot: refSlot, extraDataHash: ZeroHash, }); - const reportItems = getReportDataItems(reportFields); - const reportHash = calcReportDataHash(reportItems); - await consensus.connect(member1).submitReport(refSlot, reportHash, CONSENSUS_VERSION); + const newReportItems = getReportDataItems(newReportFields); + const newReportHash = calcReportDataHash(newReportItems); + await consensus.connect(member1).submitReport(refSlot, newReportHash, CONSENSUS_VERSION); await expect( - oracle.connect(member1).submitReportData(reportFields, oracleVersion), + oracle.connect(member1).submitReportData(newReportFields, oracleVersion), ).to.be.revertedWithCustomError(oracle, "ExtraDataHashCannotBeZeroForNonEmptyData"); }); }); @@ -586,17 +586,17 @@ describe("AccountingOracle.sol:submitReport", () => { await consensus.setTime(deadline); const { refSlot } = await consensus.getCurrentFrame(); const nonZeroHash = keccakFromString("nonZeroHash"); - const reportFields = getReportFields({ + const newReportFields = getReportFields({ refSlot: refSlot, isBunkerMode: false, extraDataFormat: 
EXTRA_DATA_FORMAT_EMPTY, extraDataHash: nonZeroHash, extraDataItemsCount: 0, }); - const reportItems = getReportDataItems(reportFields); - const reportHash = calcReportDataHash(reportItems); - await consensus.connect(member1).submitReport(refSlot, reportHash, CONSENSUS_VERSION); - await expect(oracle.connect(member1).submitReportData(reportFields, oracleVersion)) + const newReportItems = getReportDataItems(newReportFields); + const newReportHash = calcReportDataHash(newReportItems); + await consensus.connect(member1).submitReport(refSlot, newReportHash, CONSENSUS_VERSION); + await expect(oracle.connect(member1).submitReportData(newReportFields, oracleVersion)) .to.be.revertedWithCustomError(oracle, "UnexpectedExtraDataHash") .withArgs(ZeroHash, nonZeroHash); }); @@ -604,17 +604,17 @@ describe("AccountingOracle.sol:submitReport", () => { it("should revert for non zero ExtraDataLength", async () => { await consensus.setTime(deadline); const { refSlot } = await consensus.getCurrentFrame(); - const reportFields = getReportFields({ + const newReportFields = getReportFields({ refSlot: refSlot, isBunkerMode: false, extraDataFormat: EXTRA_DATA_FORMAT_EMPTY, extraDataHash: ZeroHash, extraDataItemsCount: 10, }); - const reportItems = getReportDataItems(reportFields); - const reportHash = calcReportDataHash(reportItems); - await consensus.connect(member1).submitReport(refSlot, reportHash, CONSENSUS_VERSION); - await expect(oracle.connect(member1).submitReportData(reportFields, oracleVersion)) + const newReportItems = getReportDataItems(newReportFields); + const newReportHash = calcReportDataHash(newReportItems); + await consensus.connect(member1).submitReport(refSlot, newReportHash, CONSENSUS_VERSION); + await expect(oracle.connect(member1).submitReportData(newReportFields, oracleVersion)) .to.be.revertedWithCustomError(oracle, "UnexpectedExtraDataItemsCount") .withArgs(0, 10); }); diff --git a/test/0.8.9/ossifiableProxy.test.ts b/test/0.8.9/ossifiableProxy.test.ts index 8357e80c8..7eec47291 100644 --- a/test/0.8.9/ossifiableProxy.test.ts +++ b/test/0.8.9/ossifiableProxy.test.ts @@ -18,8 +18,8 @@ describe("OssifiableProxy", () => { let proxy: OssifiableProxy; let snapshot: string; let initPayload: string; - let InitializableContract: Initializable__Mock__factory; - let OssifiableProxy: OssifiableProxy__factory; + let initializableContract: Initializable__Mock__factory; + let ossifiableProxy: OssifiableProxy__factory; async function takeSnapshot() { snapshot = await Snapshot.take(); @@ -31,11 +31,11 @@ describe("OssifiableProxy", () => { beforeEach(async () => { [admin, stranger] = await ethers.getSigners(); - InitializableContract = await ethers.getContractFactory("Initializable__Mock"); - OssifiableProxy = await ethers.getContractFactory("OssifiableProxy"); + initializableContract = await ethers.getContractFactory("Initializable__Mock"); + ossifiableProxy = await ethers.getContractFactory("OssifiableProxy"); - currentImpl = await InitializableContract.deploy(); - proxy = await OssifiableProxy.deploy(await currentImpl.getAddress(), await admin.getAddress(), "0x"); + currentImpl = await initializableContract.deploy(); + proxy = await ossifiableProxy.deploy(await currentImpl.getAddress(), await admin.getAddress(), "0x"); initPayload = currentImpl.interface.encodeFunctionData("initialize", [1]); }); @@ -45,8 +45,8 @@ describe("OssifiableProxy", () => { describe("deploy", () => { it("with empty calldata", async () => { - currentImpl = await InitializableContract.deploy(); - proxy = await 
OssifiableProxy.deploy(await currentImpl.getAddress(), await admin.getAddress(), "0x"); + currentImpl = await initializableContract.deploy(); + proxy = await ossifiableProxy.deploy(await currentImpl.getAddress(), await admin.getAddress(), "0x"); const tx = proxy.deploymentTransaction(); const implInterfaceOnProxyAddr = currentImpl.attach(await proxy.getAddress()) as Initializable__Mock; @@ -59,8 +59,8 @@ describe("OssifiableProxy", () => { }); it("with calldata", async () => { - currentImpl = await InitializableContract.deploy(); - proxy = await OssifiableProxy.deploy(await currentImpl.getAddress(), await admin.getAddress(), initPayload); + currentImpl = await initializableContract.deploy(); + proxy = await ossifiableProxy.deploy(await currentImpl.getAddress(), await admin.getAddress(), initPayload); const tx = proxy.deploymentTransaction(); const implInterfaceOnProxyAddr = currentImpl.attach(await proxy.getAddress()) as Initializable__Mock; diff --git a/test/0.8.9/sanityChecks/baseOracleReportSanityChecker.test.ts b/test/0.8.9/sanityChecks/baseOracleReportSanityChecker.test.ts index d79c8a3b5..bab8baf5b 100644 --- a/test/0.8.9/sanityChecks/baseOracleReportSanityChecker.test.ts +++ b/test/0.8.9/sanityChecks/baseOracleReportSanityChecker.test.ts @@ -22,26 +22,26 @@ describe("OracleReportSanityChecker.sol", () => { let managersRoster: Record; const defaultLimitsList = { - churnValidatorsPerDayLimit: 55, - oneOffCLBalanceDecreaseBPLimit: 5_00, // 5% - annualBalanceIncreaseBPLimit: 10_00, // 10% - simulatedShareRateDeviationBPLimit: 2_50, // 2.5% - maxValidatorExitRequestsPerReport: 2000, - maxAccountingExtraDataListItemsCount: 15, - maxNodeOperatorsPerExtraDataItemCount: 16, - requestTimestampMargin: 128, - maxPositiveTokenRebase: 5_000_000, // 0.05% + churnValidatorsPerDayLimit: 55n, + oneOffCLBalanceDecreaseBPLimit: 5_00n, // 5% + annualBalanceIncreaseBPLimit: 10_00n, // 10% + simulatedShareRateDeviationBPLimit: 2_50n, // 2.5% + maxValidatorExitRequestsPerReport: 2000n, + maxAccountingExtraDataListItemsCount: 15n, + maxNodeOperatorsPerExtraDataItemCount: 16n, + requestTimestampMargin: 128n, + maxPositiveTokenRebase: 5_000_000n, // 0.05% }; const correctLidoOracleReport = { - timeElapsed: 24 * 60 * 60, + timeElapsed: 24n * 60n * 60n, preCLBalance: ether("100000"), postCLBalance: ether("100001"), - withdrawalVaultBalance: 0, - elRewardsVaultBalance: 0, - sharesRequestedToBurn: 0, - preCLValidators: 0, - postCLValidators: 0, + withdrawalVaultBalance: 0n, + elRewardsVaultBalance: 0n, + sharesRequestedToBurn: 0n, + preCLValidators: 0n, + postCLValidators: 0n, }; type CheckAccountingOracleReportParameters = [number, bigint, bigint, number, number, number, number, number]; let deployer: HardhatEthersSigner; @@ -382,15 +382,15 @@ describe("OracleReportSanityChecker.sol", () => { }); describe("checkWithdrawalQueueOracleReport()", () => { - const oldRequestId = 1; - const newRequestId = 2; + const oldRequestId = 1n; + const newRequestId = 2n; let oldRequestCreationTimestamp; - let newRequestCreationTimestamp: number; + let newRequestCreationTimestamp: bigint; const correctWithdrawalQueueOracleReport = { lastFinalizableRequestId: oldRequestId, - refReportTimestamp: -1, + refReportTimestamp: -1n, }; - type CheckWithdrawalQueueOracleReportParameters = [number, number]; + type CheckWithdrawalQueueOracleReportParameters = [bigint, bigint]; before(async () => { const currentBlockTimestamp = await getCurrentBlockTimestamp(); @@ -398,7 +398,7 @@ describe("OracleReportSanityChecker.sol", () => { 
oldRequestCreationTimestamp = currentBlockTimestamp - defaultLimitsList.requestTimestampMargin; correctWithdrawalQueueOracleReport.lastFinalizableRequestId = oldRequestCreationTimestamp; await withdrawalQueueMock.setRequestTimestamp(oldRequestId, oldRequestCreationTimestamp); - newRequestCreationTimestamp = currentBlockTimestamp - Math.floor(defaultLimitsList.requestTimestampMargin / 2); + newRequestCreationTimestamp = currentBlockTimestamp - defaultLimitsList.requestTimestampMargin / 2n; await withdrawalQueueMock.setRequestTimestamp(newRequestId, newRequestCreationTimestamp); }); @@ -1024,13 +1024,14 @@ describe("OracleReportSanityChecker.sol", () => { describe("churn limit", () => { it("setChurnValidatorsPerDayLimit works", async () => { const oldChurnLimit = defaultLimitsList.churnValidatorsPerDayLimit; + await oracleReportSanityChecker.checkExitedValidatorsRatePerDay(oldChurnLimit); - await expect(oracleReportSanityChecker.checkExitedValidatorsRatePerDay(oldChurnLimit + 1)) + await expect(oracleReportSanityChecker.checkExitedValidatorsRatePerDay(oldChurnLimit + 1n)) .to.be.revertedWithCustomError(oracleReportSanityChecker, "ExitedValidatorsLimitExceeded") - .withArgs(oldChurnLimit, oldChurnLimit + 1); - expect((await oracleReportSanityChecker.getOracleReportLimits()).churnValidatorsPerDayLimit).to.equal( - oldChurnLimit, - ); + .withArgs(oldChurnLimit, oldChurnLimit + 1n); + + const { churnValidatorsPerDayLimit } = await oracleReportSanityChecker.getOracleReportLimits(); + expect(churnValidatorsPerDayLimit).to.equal(oldChurnLimit); const newChurnLimit = 30; expect(newChurnLimit).to.not.equal(oldChurnLimit); @@ -1060,9 +1061,9 @@ describe("OracleReportSanityChecker.sol", () => { it("checkAccountingOracleReport: churnLimit works", async () => { const churnLimit = defaultLimitsList.churnValidatorsPerDayLimit; - expect((await oracleReportSanityChecker.getOracleReportLimits()).churnValidatorsPerDayLimit).to.equal( - churnLimit, - ); + + const { churnValidatorsPerDayLimit } = await oracleReportSanityChecker.getOracleReportLimits(); + expect(churnValidatorsPerDayLimit).to.equal(churnLimit); await oracleReportSanityChecker.checkAccountingOracleReport( ...(Object.values({ @@ -1074,12 +1075,12 @@ describe("OracleReportSanityChecker.sol", () => { oracleReportSanityChecker.checkAccountingOracleReport( ...(Object.values({ ...correctLidoOracleReport, - postCLValidators: churnLimit + 1, + postCLValidators: churnLimit + 1n, }) as CheckAccountingOracleReportParameters), ), ) .to.be.revertedWithCustomError(oracleReportSanityChecker, "IncorrectAppearedValidators") - .withArgs(churnLimit + 1); + .withArgs(churnLimit + 1n); }); }); @@ -1092,12 +1093,13 @@ describe("OracleReportSanityChecker.sol", () => { it("checkExitBusOracleReport works", async () => { const maxRequests = defaultLimitsList.maxValidatorExitRequestsPerReport; + expect((await oracleReportSanityChecker.getOracleReportLimits()).maxValidatorExitRequestsPerReport).to.equal( maxRequests, ); await oracleReportSanityChecker.checkExitBusOracleReport(maxRequests); - await expect(oracleReportSanityChecker.checkExitBusOracleReport(maxRequests + 1)) + await expect(oracleReportSanityChecker.checkExitBusOracleReport(maxRequests + 1n)) .to.be.revertedWithCustomError(oracleReportSanityChecker, "IncorrectNumberOfExitRequestsPerReport") .withArgs(maxRequests); }); @@ -1105,7 +1107,7 @@ describe("OracleReportSanityChecker.sol", () => { it("setMaxExitRequestsPerOracleReport", async () => { const oldMaxRequests = 
defaultLimitsList.maxValidatorExitRequestsPerReport; await oracleReportSanityChecker.checkExitBusOracleReport(oldMaxRequests); - await expect(oracleReportSanityChecker.checkExitBusOracleReport(oldMaxRequests + 1)) + await expect(oracleReportSanityChecker.checkExitBusOracleReport(oldMaxRequests + 1n)) .to.be.revertedWithCustomError(oracleReportSanityChecker, "IncorrectNumberOfExitRequestsPerReport") .withArgs(oldMaxRequests); expect((await oracleReportSanityChecker.getOracleReportLimits()).maxValidatorExitRequestsPerReport).to.equal( diff --git a/test/0.8.9/withdrawalQueue.test.ts b/test/0.8.9/withdrawalQueue.test.ts index e029fc99d..415d0846e 100644 --- a/test/0.8.9/withdrawalQueue.test.ts +++ b/test/0.8.9/withdrawalQueue.test.ts @@ -328,13 +328,13 @@ describe("WithdrawalQueue.sol", () => { it("Creates requests for multiple amounts with zero owner address", async () => { const amount = ether("10.00"); - const shares = await stEth.getSharesByPooledEth(amount); + const sharesToWithdraw = await stEth.getSharesByPooledEth(amount); const requestIdBefore = await queue.getLastRequestId(); await expect(queue.connect(user).requestWithdrawals([amount], ZeroAddress)) .to.emit(queue, "WithdrawalRequested") - .withArgs(1, user.address, user.address, amount, shares); + .withArgs(1, user.address, user.address, amount, sharesToWithdraw); const diff = (await queue.getLastRequestId()) - requestIdBefore; expect(diff).to.equal(requestIdBefore + 1n); @@ -394,13 +394,13 @@ describe("WithdrawalQueue.sol", () => { const amount = ether("10.00"); const stEthAmount = await wstEth.getStETHByWstETH(amount); - const shares = await stEth.getSharesByPooledEth(stEthAmount); + const sharesToWithdraw = await stEth.getSharesByPooledEth(stEthAmount); const requestIdBefore = await queue.getLastRequestId(); await expect(queue.connect(user).requestWithdrawalsWstETH([amount], ZeroAddress)) .to.emit(queue, "WithdrawalRequested") - .withArgs(1, user.address, user.address, stEthAmount, shares); + .withArgs(1, user.address, user.address, stEthAmount, sharesToWithdraw); const requestIdAfter = await queue.getLastRequestId(); const diff = requestIdAfter - requestIdBefore; @@ -443,15 +443,15 @@ describe("WithdrawalQueue.sol", () => { it("Creates requests for multiple amounts with valid permit", async () => { const oneRequestSize = requests[0]; - const shares = await stEth.getSharesByPooledEth(oneRequestSize); + const sharesToWithdraw = await stEth.getSharesByPooledEth(oneRequestSize); const requestIdBefore = await queue.getLastRequestId(); await expect(queue.connect(alice).requestWithdrawalsWithPermit(requests, owner, permit)) .to.emit(queue, "WithdrawalRequested") - .withArgs(1, alice.address, owner.address, oneRequestSize, shares) + .withArgs(1, alice.address, owner.address, oneRequestSize, sharesToWithdraw) .to.emit(queue, "WithdrawalRequested") - .withArgs(2, alice.address, owner.address, oneRequestSize, shares); + .withArgs(2, alice.address, owner.address, oneRequestSize, sharesToWithdraw); const diff = (await queue.getLastRequestId()) - requestIdBefore; expect(diff).to.equal(requestIdBefore + BigInt(requests.length)); @@ -459,13 +459,13 @@ describe("WithdrawalQueue.sol", () => { it("Creates requests for single amounts with valid permit and zero owner address", async () => { const request = requests[0]; - const shares = await stEth.getSharesByPooledEth(request); + const sharesToWithdraw = await stEth.getSharesByPooledEth(request); const requestIdBefore = await queue.getLastRequestId(); await 
expect(queue.connect(alice).requestWithdrawalsWithPermit([request], ZeroAddress, permit)) .to.emit(queue, "WithdrawalRequested") - .withArgs(1, alice.address, alice.address, request, shares); + .withArgs(1, alice.address, alice.address, request, sharesToWithdraw); const diff = (await queue.getLastRequestId()) - requestIdBefore; expect(diff).to.equal(requestIdBefore + 1n); @@ -507,14 +507,14 @@ describe("WithdrawalQueue.sol", () => { it("Creates requests for multiple amounts with valid permit", async () => { const oneRequestSize = requests[0]; const stEthAmount = await wstEth.getStETHByWstETH(oneRequestSize); - const shares = await stEth.getSharesByPooledEth(stEthAmount); + const sharesToWithdraw = await stEth.getSharesByPooledEth(stEthAmount); const requestIdBefore = await queue.getLastRequestId(); await expect(queue.connect(alice).requestWithdrawalsWstETHWithPermit(requests, owner, permit)) .to.emit(queue, "WithdrawalRequested") - .withArgs(1, alice.address, owner.address, stEthAmount, shares) + .withArgs(1, alice.address, owner.address, stEthAmount, sharesToWithdraw) .to.emit(queue, "WithdrawalRequested") - .withArgs(2, alice.address, owner.address, stEthAmount, shares); + .withArgs(2, alice.address, owner.address, stEthAmount, sharesToWithdraw); const requestIdAfter = await queue.getLastRequestId(); const diff = requestIdAfter - requestIdBefore; @@ -524,12 +524,12 @@ describe("WithdrawalQueue.sol", () => { it("Creates requests for single amounts with valid permit and zero owner address", async () => { const request = requests[0]; const stEthAmount = await wstEth.getStETHByWstETH(request); - const shares = await stEth.getSharesByPooledEth(stEthAmount); + const sharesToWithdraw = await stEth.getSharesByPooledEth(stEthAmount); const requestIdBefore = await queue.getLastRequestId(); await expect(queue.connect(alice).requestWithdrawalsWstETHWithPermit([request], ZeroAddress, permit)) .to.emit(queue, "WithdrawalRequested") - .withArgs(1, alice.address, alice.address, stEthAmount, shares); + .withArgs(1, alice.address, alice.address, stEthAmount, sharesToWithdraw); const requestIdAfter = await queue.getLastRequestId(); const diff = requestIdAfter - requestIdBefore; diff --git a/test/0.8.9/withdrawalQueueBase.test.ts b/test/0.8.9/withdrawalQueueBase.test.ts index 2e320d6bd..9bf52b1da 100644 --- a/test/0.8.9/withdrawalQueueBase.test.ts +++ b/test/0.8.9/withdrawalQueueBase.test.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { parseUnits } from "ethers"; +import { parseUnits, Result } from "ethers"; import { ethers } from "hardhat"; import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; @@ -199,7 +199,7 @@ describe("WithdrawalQueueBase.sol", () => { buildBatchCalculationState( calc1.remainingEthBudget, calc1.finished, - calc1.batches.map((x) => x), + (calc1.batches as Result).toArray(), calc1.batchesLength, ), ); @@ -233,7 +233,7 @@ describe("WithdrawalQueueBase.sol", () => { buildBatchCalculationState( calc1.remainingEthBudget, calc1.finished, - calc1.batches.map((x) => x), + (calc1.batches as Result).toArray(), calc1.batchesLength, ), ); @@ -272,7 +272,7 @@ describe("WithdrawalQueueBase.sol", () => { buildBatchCalculationState( calc1.remainingEthBudget, calc1.finished, - calc1.batches.map((x) => x), + (calc1.batches as Result).toArray(), calc1.batchesLength, ), ); @@ -311,7 +311,7 @@ describe("WithdrawalQueueBase.sol", () => { buildBatchCalculationState( calc1.remainingEthBudget, calc1.finished, - calc1.batches.map((x) => x), + (calc1.batches as 
Result).toArray(), calc1.batchesLength, ), ); diff --git a/test/common/erc2612.test.ts b/test/common/erc2612.test.ts index 51fdc65ee..e3bef26c0 100644 --- a/test/common/erc2612.test.ts +++ b/test/common/erc2612.test.ts @@ -26,7 +26,7 @@ export function testERC2612Compliance({ tokenName, deploy, suiteFunction = descr suiteFunction(`${tokenName} ERC-2612 Compliance`, () => { let token: IERC20 & IERC2612; let domain: TypedDataDomain; - let owner: string; + let holder: string; let signer: Signer; let permit: Permit; @@ -35,15 +35,15 @@ export function testERC2612Compliance({ tokenName, deploy, suiteFunction = descr let originalState: string; before(async () => { - ({ token, domain, owner, signer } = await deploy()); + ({ token, domain, owner: holder, signer } = await deploy()); - const holderBalance = await token.balanceOf(owner); + const holderBalance = await token.balanceOf(holder); permit = { - owner, + owner: holder, spender: certainAddress("spender"), value: holderBalance, - nonce: await token.nonces(owner), + nonce: await token.nonces(holder), deadline: BigInt(await time.latest()) + days(7n), }; signature = await signPermit(domain, permit, signer); diff --git a/test/integration/burn-shares.ts b/test/integration/burn-shares.ts new file mode 100644 index 000000000..5f5821cdd --- /dev/null +++ b/test/integration/burn-shares.ts @@ -0,0 +1,108 @@ +import { expect } from "chai"; +import { ZeroAddress } from "ethers"; +import { ethers } from "hardhat"; + +import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; + +import { ether, impersonate, log, trace } from "lib"; +import { getProtocolContext, ProtocolContext } from "lib/protocol"; +import { finalizeWithdrawalQueue, handleOracleReport } from "lib/protocol/helpers"; + +import { Snapshot } from "test/suite"; + +describe("Burn Shares", () => { + let ctx: ProtocolContext; + let snapshot: string; + + let ethHolder: HardhatEthersSigner; + let stEthHolder: HardhatEthersSigner; + let stranger: HardhatEthersSigner; + + const amount = ether("1"); + let sharesToBurn: bigint; + let totalEth: bigint; + let totalShares: bigint; + + before(async () => { + ctx = await getProtocolContext(); + + [stEthHolder, ethHolder, stranger] = await ethers.getSigners(); + + snapshot = await Snapshot.take(); + }); + + after(async () => await Snapshot.restore(snapshot)); + + it("Should finalize withdrawal queue", async () => { + const { withdrawalQueue } = ctx.contracts; + + await finalizeWithdrawalQueue(ctx, stEthHolder, ethHolder); + + const lastFinalizedRequestId = await withdrawalQueue.getLastFinalizedRequestId(); + const lastRequestId = await withdrawalQueue.getLastRequestId(); + + expect(lastFinalizedRequestId).to.equal(lastRequestId); + }); + + it("Should allow stranger to submit ETH", async () => { + const { lido } = ctx.contracts; + + const submitTx = await lido.connect(stranger).submit(ZeroAddress, { value: amount }); + await trace("lido.submit", submitTx); + + const stEthBefore = await lido.balanceOf(stranger.address); + expect(stEthBefore).to.be.approximately(amount, 10n, "Incorrect stETH balance after submit"); + + sharesToBurn = await lido.sharesOf(stranger.address); + totalEth = await lido.totalSupply(); + totalShares = await lido.getTotalShares(); + + log.debug("Shares state before", { + "Stranger shares": sharesToBurn, + "Total ETH": ethers.formatEther(totalEth), + "Total shares": totalShares, + }); + }); + + it("Should not allow stranger to burn shares", async () => { + const { burner } = ctx.contracts; + const burnTx = 
burner.connect(stranger).commitSharesToBurn(sharesToBurn); + + await expect(burnTx).to.be.revertedWithCustomError(burner, "AppAuthLidoFailed"); + }); + + it("Should burn shares after report", async () => { + const { lido, burner } = ctx.contracts; + + const approveTx = await lido.connect(stranger).approve(burner.address, ether("1000000")); + await trace("lido.approve", approveTx); + + const lidoSigner = await impersonate(lido.address); + const burnTx = await burner.connect(lidoSigner).requestBurnSharesForCover(stranger, sharesToBurn); + await trace("burner.requestBurnSharesForCover", burnTx); + + const { beaconValidators, beaconBalance } = await lido.getBeaconStat(); + + await handleOracleReport(ctx, { + beaconValidators, + clBalance: beaconBalance, + sharesRequestedToBurn: sharesToBurn, + withdrawalVaultBalance: 0n, + elRewardsVaultBalance: 0n, + }); + + const sharesToBurnAfter = await lido.sharesOf(stranger.address); + const totalEthAfter = await lido.totalSupply(); + const totalSharesAfter = await lido.getTotalShares(); + + log.debug("Shares state after", { + "Stranger shares": sharesToBurnAfter, + "Total ETH": ethers.formatEther(totalEthAfter), + "Total shares": totalSharesAfter, + }); + + expect(sharesToBurnAfter).to.equal(0n, "Incorrect shares balance after burn"); + expect(totalEthAfter).to.equal(totalEth, "Incorrect total ETH supply after burn"); + expect(totalSharesAfter).to.equal(totalShares - sharesToBurn, "Incorrect total shares after burn"); + }); +}); diff --git a/test/integration/protocol-happy-path.ts b/test/integration/protocol-happy-path.ts new file mode 100644 index 000000000..eade6bf90 --- /dev/null +++ b/test/integration/protocol-happy-path.ts @@ -0,0 +1,602 @@ +import { expect } from "chai"; +import { ContractTransactionReceipt, Result, TransactionResponse, ZeroAddress } from "ethers"; +import { ethers } from "hardhat"; + +import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers"; + +import { batch, ether, impersonate, log, trace, updateBalance } from "lib"; +import { getProtocolContext, ProtocolContext } from "lib/protocol"; +import { + finalizeWithdrawalQueue, + norEnsureOperators, + OracleReportOptions, + report, + sdvtEnsureOperators, +} from "lib/protocol/helpers"; + +import { Snapshot } from "test/suite"; + +const AMOUNT = ether("100"); +const MAX_DEPOSIT = 150n; +const CURATED_MODULE_ID = 1n; +const SIMPLE_DVT_MODULE_ID = 2n; + +const ZERO_HASH = new Uint8Array(32).fill(0); + +describe("Happy Path", () => { + let ctx: ProtocolContext; + let snapshot: string; + + let ethHolder: HardhatEthersSigner; + let stEthHolder: HardhatEthersSigner; + let stranger: HardhatEthersSigner; + + let uncountedStETHShares: bigint; + let amountWithRewards: bigint; + + before(async () => { + ctx = await getProtocolContext(); + + [stEthHolder, ethHolder, stranger] = await ethers.getSigners(); + + snapshot = await Snapshot.take(); + }); + + after(async () => await Snapshot.restore(snapshot)); + + const getBalances = async (wallet: HardhatEthersSigner) => { + const { lido } = ctx.contracts; + return batch({ + ETH: ethers.provider.getBalance(wallet), + stETH: lido.balanceOf(wallet), + }); + }; + + it("Should finalize withdrawal queue", async () => { + const { lido, withdrawalQueue } = ctx.contracts; + + await finalizeWithdrawalQueue(ctx, stEthHolder, ethHolder); + + const lastFinalizedRequestId = await withdrawalQueue.getLastFinalizedRequestId(); + const lastRequestId = await withdrawalQueue.getLastRequestId(); + + // Will be used in finalization part + 
uncountedStETHShares = await lido.sharesOf(withdrawalQueue.address); + + // Added to facilitate the burner transfers + const approveTx = await lido.connect(stEthHolder).approve(withdrawalQueue.address, 1000n); + await trace("lido.approve", approveTx); + + const requestWithdrawalsTx = await withdrawalQueue.connect(stEthHolder).requestWithdrawals([1000n], stEthHolder); + await trace("withdrawalQueue.requestWithdrawals", requestWithdrawalsTx); + + expect(lastFinalizedRequestId).to.equal(lastRequestId); + }); + + it("Should have at least 3 node operators in every module", async () => { + await norEnsureOperators(ctx, 3n, 5n); + expect(await ctx.contracts.nor.getNodeOperatorsCount()).to.be.at.least(3n); + + if (ctx.flags.withSimpleDvtModule) { + await sdvtEnsureOperators(ctx, 3n, 5n); + expect(await ctx.contracts.sdvt.getNodeOperatorsCount()).to.be.at.least(3n); + } + }); + + it("Should allow ETH holders to submit 100 ETH stake", async () => { + const { lido } = ctx.contracts; + + await updateBalance(stranger.address, ether("1000000")); + + const strangerBalancesBeforeSubmit = await getBalances(stranger); + + log.debug("Stranger before submit", { + address: stranger.address, + ETH: ethers.formatEther(strangerBalancesBeforeSubmit.ETH), + stETH: ethers.formatEther(strangerBalancesBeforeSubmit.stETH), + }); + + expect(strangerBalancesBeforeSubmit.stETH).to.equal(0n, "stETH balance before submit"); + expect(strangerBalancesBeforeSubmit.ETH).to.equal(ether("1000000"), "ETH balance before submit"); + + const stakeLimitInfoBefore = await lido.getStakeLimitFullInfo(); + + log.debug("Stake limit info before submit", { + "Max stake limit": ethers.formatEther(stakeLimitInfoBefore.maxStakeLimit), + "Max stake limit growth blocks": stakeLimitInfoBefore.maxStakeLimitGrowthBlocks, + }); + + const growthPerBlock = stakeLimitInfoBefore.maxStakeLimit / stakeLimitInfoBefore.maxStakeLimitGrowthBlocks; + + const totalSupplyBeforeSubmit = await lido.totalSupply(); + const bufferedEtherBeforeSubmit = await lido.getBufferedEther(); + const stakingLimitBeforeSubmit = await lido.getCurrentStakeLimit(); + const heightBeforeSubmit = await ethers.provider.getBlockNumber(); + + log.debug("Before submit", { + "Chain height": heightBeforeSubmit, + "Growth per block": ethers.formatEther(growthPerBlock), + "Total supply": ethers.formatEther(totalSupplyBeforeSubmit), + "Buffered ether": ethers.formatEther(bufferedEtherBeforeSubmit), + "Staking limit": ethers.formatEther(stakingLimitBeforeSubmit), + }); + + const tx = await lido.connect(stranger).submit(ZeroAddress, { value: AMOUNT }); + const receipt = await trace("lido.submit", tx); + + expect(receipt).not.to.be.null; + + const strangerBalancesAfterSubmit = await getBalances(stranger); + + log.debug("Stranger after submit", { + address: stranger.address, + ETH: ethers.formatEther(strangerBalancesAfterSubmit.ETH), + stETH: ethers.formatEther(strangerBalancesAfterSubmit.stETH), + }); + + const spendEth = AMOUNT + receipt.gasUsed * receipt.gasPrice; + + expect(strangerBalancesAfterSubmit.stETH).to.be.approximately( + strangerBalancesBeforeSubmit.stETH + AMOUNT, + 10n, + "stETH balance after submit", + ); + expect(strangerBalancesAfterSubmit.ETH).to.be.approximately( + strangerBalancesBeforeSubmit.ETH - spendEth, + 10n, + "ETH balance after submit", + ); + + const submittedEvent = ctx.getEvents(receipt, "Submitted")[0]; + const transferSharesEvent = ctx.getEvents(receipt, "TransferShares")[0]; + const sharesToBeMinted = await lido.getSharesByPooledEth(AMOUNT); + const mintedShares 
= await lido.sharesOf(stranger); + + expect(submittedEvent?.args.toObject()).to.deep.equal({ + sender: stranger.address, + amount: AMOUNT, + referral: ZeroAddress, + }, "Submitted event"); + + expect(transferSharesEvent?.args.toObject()).to.deep.equal({ + from: ZeroAddress, + to: stranger.address, + sharesValue: sharesToBeMinted, + }, "TransferShares event"); + + expect(mintedShares).to.equal(sharesToBeMinted, "Minted shares"); + + const totalSupplyAfterSubmit = await lido.totalSupply(); + const bufferedEtherAfterSubmit = await lido.getBufferedEther(); + const stakingLimitAfterSubmit = await lido.getCurrentStakeLimit(); + + expect(totalSupplyAfterSubmit).to.equal(totalSupplyBeforeSubmit + AMOUNT, "Total supply after submit"); + expect(bufferedEtherAfterSubmit).to.equal(bufferedEtherBeforeSubmit + AMOUNT, "Buffered ether after submit"); + + if (stakingLimitBeforeSubmit >= stakeLimitInfoBefore.maxStakeLimit - growthPerBlock) { + expect(stakingLimitAfterSubmit).to.equal( + stakingLimitBeforeSubmit - AMOUNT, + "Staking limit after submit without growth", + ); + } else { + expect(stakingLimitAfterSubmit).to.equal( + stakingLimitBeforeSubmit - AMOUNT + growthPerBlock, + "Staking limit after submit", + ); + } + }); + + it("Should deposit 100 ETH to node operators", async () => { + const { lido, withdrawalQueue } = ctx.contracts; + + const { depositSecurityModule } = ctx.contracts; + const { depositedValidators: depositedValidatorsBefore } = await lido.getBeaconStat(); + const withdrawalsUninitializedStETH = await withdrawalQueue.unfinalizedStETH(); + const depositableEther = await lido.getDepositableEther(); + const bufferedEtherBeforeDeposit = await lido.getBufferedEther(); + + const expectedDepositableEther = bufferedEtherBeforeDeposit - withdrawalsUninitializedStETH; + + expect(depositableEther).to.equal(expectedDepositableEther, "Depositable ether"); + + log.debug("Depositable ether", { + "Buffered ether": ethers.formatEther(bufferedEtherBeforeDeposit), + "Withdrawals uninitialized stETH": ethers.formatEther(withdrawalsUninitializedStETH), + "Depositable ether": ethers.formatEther(depositableEther), + }); + + const dsmSigner = await impersonate(depositSecurityModule.address, ether("100")); + + const depositNorTx = await lido.connect(dsmSigner).deposit(MAX_DEPOSIT, CURATED_MODULE_ID, ZERO_HASH); + const depositNorReceipt = await trace("lido.deposit (Curated Module)", depositNorTx); + + const unbufferedEventNor = ctx.getEvents(depositNorReceipt, "Unbuffered")[0]; + const unbufferedAmountNor = unbufferedEventNor.args[0]; + + const depositCountsNor = unbufferedAmountNor / ether("32"); + let expectedBufferedEtherAfterDeposit = bufferedEtherBeforeDeposit - unbufferedAmountNor; + + if (ctx.flags.withSimpleDvtModule) { + const depositSdvtTx = await lido.connect(dsmSigner).deposit(MAX_DEPOSIT, SIMPLE_DVT_MODULE_ID, ZERO_HASH); + const depositSdvtReceipt = await trace("lido.deposit (Simple DVT)", depositSdvtTx); + + const unbufferedEventSdvt = ctx.getEvents(depositSdvtReceipt, "Unbuffered")[0]; + const depositedValidatorsChangedEventSdvt = ctx.getEvents(depositSdvtReceipt, "DepositedValidatorsChanged")[0]; + + const unbufferedAmountSdvt = unbufferedEventSdvt.args[0]; + const newValidatorsCountSdvt = depositedValidatorsChangedEventSdvt.args[0]; + + const depositCountsTotal = depositCountsNor + unbufferedAmountSdvt / ether("32"); + expectedBufferedEtherAfterDeposit -= unbufferedAmountSdvt; + + expect(depositCountsTotal).to.be.gt(0n, "Deposit counts"); + 
expect(newValidatorsCountSdvt).to.equal(depositedValidatorsBefore + depositCountsTotal, "New validators count after deposit"); + } + + const bufferedEtherAfterDeposit = await lido.getBufferedEther(); + + expect(depositCountsNor).to.be.gt(0n, "Deposit counts"); + expect(bufferedEtherAfterDeposit).to.equal(expectedBufferedEtherAfterDeposit, "Buffered ether after deposit"); + + log.debug("After deposit", { + "Buffered ether": ethers.formatEther(bufferedEtherAfterDeposit), + "Unbuffered amount (NOR)": ethers.formatEther(unbufferedAmountNor), + }); + }); + + it("Should rebase correctly", async () => { + const { lido, withdrawalQueue, locator, burner, nor, sdvt } = ctx.contracts; + + const treasuryAddress = await locator.treasury(); + const strangerBalancesBeforeRebase = await getBalances(stranger); + + log.debug("Stranger before rebase", { + address: stranger.address, + ETH: ethers.formatEther(strangerBalancesBeforeRebase.ETH), + stETH: ethers.formatEther(strangerBalancesBeforeRebase.stETH), + }); + + const getNodeOperatorsStatus = async (registry: typeof sdvt | typeof nor) => { + const totalOperators = await registry.getNodeOperatorsCount(); + let hasPenalizedOperators = false; + let activeOperators = 0n; + + for (let i = 0n; i < totalOperators; i++) { + const operator = await registry.getNodeOperator(i, false); + hasPenalizedOperators ||= await registry.isOperatorPenalized(i); + + if (operator.totalDepositedValidators > operator.totalExitedValidators) { + activeOperators++; + } + } + + return { hasPenalizedOperators, activeOperators }; + }; + + const norStatus = await getNodeOperatorsStatus(nor); + + let expectedBurnerTransfers = norStatus.hasPenalizedOperators ? 1n : 0n; + let expectedTransfers = norStatus.activeOperators; + + let sdvtStatusLog = {}; + if (ctx.flags.withSimpleDvtModule) { + const sdvtStatus = await getNodeOperatorsStatus(sdvt); + + expectedBurnerTransfers += sdvtStatus.hasPenalizedOperators ? 
1n : 0n; + expectedTransfers += sdvtStatus.activeOperators; + + sdvtStatusLog = { + "SDVT active operators": sdvtStatus.activeOperators, + "SDVT (transfer to burner)": sdvtStatus.hasPenalizedOperators, + }; + } + + log.debug("Expected distributions", { + "NOR active operators": norStatus.activeOperators, + "NOR (transfer to burner)": norStatus.hasPenalizedOperators, + ...sdvtStatusLog, + }); + + const treasuryBalanceBeforeRebase = await lido.sharesOf(treasuryAddress); + + // Stranger deposited 100 ETH, enough to deposit 3 validators, need to reflect this in the report + // 0.01 ETH is added to the clDiff to simulate some rewards + const reportData: Partial = { + clDiff: ether("96.01"), + clAppearedValidators: 3n, + }; + + const { reportTx, extraDataTx } = (await report(ctx, reportData)) as { + reportTx: TransactionResponse; + extraDataTx: TransactionResponse; + }; + + log.debug("Oracle report", { + "Report transaction": reportTx.hash, + "Extra data transaction": extraDataTx.hash, + }); + + const strangerBalancesAfterRebase = await getBalances(stranger); + const treasuryBalanceAfterRebase = await lido.sharesOf(treasuryAddress); + + const reportTxReceipt = (await reportTx.wait()) as ContractTransactionReceipt; + const extraDataTxReceipt = (await extraDataTx.wait()) as ContractTransactionReceipt; + + const tokenRebasedEvent = ctx.getEvents(reportTxReceipt, "TokenRebased")[0]; + + expect(tokenRebasedEvent).not.to.be.undefined; + + const transferEvents = ctx.getEvents(reportTxReceipt, "Transfer"); + + const toBurnerTransfer = transferEvents[0]; + const toNorTransfer = transferEvents[1]; + const toSdvtTransfer = ctx.flags.withSimpleDvtModule ? transferEvents[2] : undefined; + const toTreasuryTransfer = ctx.flags.withSimpleDvtModule ? transferEvents[3] : transferEvents[2]; + + const expectedTransferEvents = ctx.flags.withSimpleDvtModule ? 
4 : 3; + + expect(transferEvents.length).to.equal(expectedTransferEvents, "Transfer events count"); + + expect(toBurnerTransfer?.args.toObject()).to.include({ + from: withdrawalQueue.address, + to: burner.address, + }, "Transfer to burner"); + + expect(toNorTransfer?.args.toObject()).to.include({ + from: ZeroAddress, + to: nor.address, + }, "Transfer to NOR"); + + if (ctx.flags.withSimpleDvtModule) { + expect(toSdvtTransfer?.args.toObject()).to.include({ + from: ZeroAddress, + to: sdvt.address, + }, "Transfer to SDVT"); + } + + expect(toTreasuryTransfer?.args.toObject()).to.include({ + from: ZeroAddress, + to: treasuryAddress, + }, "Transfer to Treasury"); + + const treasurySharesMinted = await lido.getSharesByPooledEth(toTreasuryTransfer.args.value); + + expect(treasuryBalanceAfterRebase).to.be.approximately( + treasuryBalanceBeforeRebase + treasurySharesMinted, + 10n, + "Treasury balance after rebase", + ); + + expect(treasuryBalanceAfterRebase).to.be.gt(treasuryBalanceBeforeRebase, "Treasury balance after rebase increased"); + expect(strangerBalancesAfterRebase.stETH).to.be.gt( + strangerBalancesBeforeRebase.stETH, + "Stranger stETH balance after rebase increased", + ); + + const transfers = ctx.getEvents(extraDataTxReceipt, "Transfer"); + const burnerTransfers = transfers.filter(e => e?.args[1] == burner.address).length; + + expect(burnerTransfers).to.equal(expectedBurnerTransfers, "Burner transfers is correct"); + + expect(transfers.length).to.equal(expectedTransfers + expectedBurnerTransfers, "All active operators received transfers"); + + log.debug("Transfers", { + "Transfers to operators": expectedTransfers, + "Burner transfers": burnerTransfers, + }); + + expect(ctx.getEvents(reportTxReceipt, "TokenRebased")[0]).not.to.be.undefined; + expect(ctx.getEvents(reportTxReceipt, "WithdrawalsFinalized")[0]).not.to.be.undefined; + + const burntSharesEvent = ctx.getEvents(reportTxReceipt, "StETHBurnt")[0]; + + expect(burntSharesEvent).not.to.be.undefined; + + const burntShares: bigint = burntSharesEvent.args[2]; + const [, , preTotalShares, , postTotalShares, , sharesMintedAsFees] = tokenRebasedEvent.args; + + expect(postTotalShares).to.equal(preTotalShares + sharesMintedAsFees - burntShares, "Post total shares"); + }); + + it("Should allow stETH holder to request withdrawals", async () => { + const { lido, withdrawalQueue } = ctx.contracts; + + const withdrawalsFromStrangerBeforeRequest = await withdrawalQueue.connect(stranger).getWithdrawalRequests(stranger); + + expect(withdrawalsFromStrangerBeforeRequest.length).to.equal(0, "Withdrawals from stranger"); + + const balanceBeforeRequest = await getBalances(stranger); + + log.debug("Stranger withdrawals before request", { + address: stranger.address, + withdrawals: withdrawalsFromStrangerBeforeRequest.length, + ETH: ethers.formatEther(balanceBeforeRequest.ETH), + stETH: ethers.formatEther(balanceBeforeRequest.stETH), + }); + + amountWithRewards = balanceBeforeRequest.stETH; + + const approveTx = await lido.connect(stranger).approve(withdrawalQueue.address, amountWithRewards); + const approveTxReceipt = await trace("lido.approve", approveTx); + + const approveEvent = ctx.getEvents(approveTxReceipt, "Approval")[0]; + + expect(approveEvent?.args.toObject()).to.deep.include({ + owner: stranger.address, + spender: withdrawalQueue.address, + value: amountWithRewards, + }, "Approval event"); + + const lastRequestIdBefore = await withdrawalQueue.getLastRequestId(); + + const withdrawalTx = await 
withdrawalQueue.connect(stranger).requestWithdrawals([amountWithRewards], stranger); + const withdrawalTxReceipt = await trace("withdrawalQueue.requestWithdrawals", withdrawalTx); + + const withdrawalEvent = ctx.getEvents(withdrawalTxReceipt, "WithdrawalRequested")[0]; + + expect(withdrawalEvent?.args.toObject()).to.deep.include({ + requestor: stranger.address, + owner: stranger.address, + amountOfStETH: amountWithRewards, + }, "WithdrawalRequested event"); + + const requestId = withdrawalEvent.args.requestId; + const withdrawalTransferEvents = ctx.getEvents(withdrawalTxReceipt, "Transfer"); + + expect(withdrawalTransferEvents.length).to.be.least(2, "Transfer events count"); + + const [stEthTransfer, unstEthTransfer] = withdrawalTransferEvents; + + expect(stEthTransfer?.args.toObject()).to.deep.include({ + from: stranger.address, + to: withdrawalQueue.address, + value: amountWithRewards, + }, "Transfer stETH"); + + expect(unstEthTransfer?.args.toObject()).to.deep.include({ + from: ZeroAddress, + to: stranger.address, + tokenId: requestId, + }, "Transfer unstETH"); + + const balanceAfterRequest = await getBalances(stranger); + + const withdrawalsFromStrangerAfterRequest = await withdrawalQueue.connect(stranger).getWithdrawalRequests(stranger); + const [status] = await withdrawalQueue.getWithdrawalStatus([requestId]); + + log.debug("Stranger withdrawals after request", { + address: stranger.address, + withdrawals: withdrawalsFromStrangerAfterRequest.length, + ETH: ethers.formatEther(balanceAfterRequest.ETH), + stETH: ethers.formatEther(balanceAfterRequest.stETH), + }); + + expect(withdrawalsFromStrangerAfterRequest.length).to.equal(1, "Withdrawals from stranger after request"); + expect(status.isFinalized).to.be.false; + + expect(balanceAfterRequest.stETH).to.be.approximately(0, 10n, "stETH balance after request"); + + const lastRequestIdAfter = await withdrawalQueue.getLastRequestId(); + expect(lastRequestIdAfter).to.equal(lastRequestIdBefore + 1n, "Last request ID after request"); + }); + + it("Should finalize withdrawals", async () => { + const { lido, withdrawalQueue } = ctx.contracts; + + log.debug("Finalizing withdrawals", { + "Uncounted stETH shares": ethers.formatEther(uncountedStETHShares), + "Amount with rewards": ethers.formatEther(amountWithRewards), + }); + + const uncountedStETHBalanceBeforeFinalization = await lido.getPooledEthByShares(uncountedStETHShares); + const withdrawalQueueBalanceBeforeFinalization = await lido.balanceOf(withdrawalQueue.address); + const expectedWithdrawalAmount = amountWithRewards + uncountedStETHBalanceBeforeFinalization; + + log.debug("Withdrawal queue balance before finalization", { + "Uncounted stETH balance": ethers.formatEther(uncountedStETHBalanceBeforeFinalization), + "Withdrawal queue balance": ethers.formatEther(withdrawalQueueBalanceBeforeFinalization), + "Expected withdrawal amount": ethers.formatEther(expectedWithdrawalAmount), + }); + + expect(withdrawalQueueBalanceBeforeFinalization).to.be.approximately(expectedWithdrawalAmount, 10n, "Withdrawal queue balance before finalization"); + + const lockedEtherAmountBeforeFinalization = await withdrawalQueue.getLockedEtherAmount(); + + const reportParams = { clDiff: ether("0.0005") }; // simulate some rewards + const { reportTx } = (await report(ctx, reportParams)) as { reportTx: TransactionResponse }; + + const reportTxReceipt = (await reportTx.wait()) as ContractTransactionReceipt; + + const requestId = await withdrawalQueue.getLastRequestId(); + + const lockedEtherAmountAfterFinalization = 
await withdrawalQueue.getLockedEtherAmount(); + const expectedLockedEtherAmountAfterFinalization = lockedEtherAmountAfterFinalization - amountWithRewards; + + log.debug("Locked ether amount", { + "Before finalization": ethers.formatEther(lockedEtherAmountBeforeFinalization), + "After finalization": ethers.formatEther(lockedEtherAmountAfterFinalization), + "Amount with rewards": ethers.formatEther(amountWithRewards), + }); + + expect(lockedEtherAmountBeforeFinalization).to.equal(expectedLockedEtherAmountAfterFinalization, "Locked ether amount after finalization"); + + const withdrawalFinalizedEvent = ctx.getEvents(reportTxReceipt, "WithdrawalsFinalized")[0]; + + expect(withdrawalFinalizedEvent?.args.toObject()).to.deep.include({ + amountOfETHLocked: amountWithRewards, + from: requestId, + to: requestId, + }, "WithdrawalFinalized event"); + + const withdrawalQueueBalanceAfterFinalization = await lido.balanceOf(withdrawalQueue.address); + const uncountedStETHBalanceAfterFinalization = await lido.getPooledEthByShares(uncountedStETHShares); + + expect(withdrawalQueueBalanceAfterFinalization).to.equal(uncountedStETHBalanceAfterFinalization, "Withdrawal queue balance after finalization"); + }); + + it("Should claim withdrawals", async () => { + const { withdrawalQueue } = ctx.contracts; + + const lockedEtherAmountBeforeWithdrawal = await withdrawalQueue.getLockedEtherAmount(); + + const lastCheckpointIndex = await withdrawalQueue.getLastCheckpointIndex(); + const requestId = await withdrawalQueue.getLastRequestId(); + + // in fact, it's a proxy and not a real array, so we need to convert it to array + const hintsProxy = await withdrawalQueue.findCheckpointHints([requestId], 1n, lastCheckpointIndex) as Result; + const hints = hintsProxy.toArray(); + + const [claimableEtherBeforeClaim] = await withdrawalQueue.getClaimableEther([requestId], hints); + const [status] = await withdrawalQueue.getWithdrawalStatus([requestId]); + + const balanceBeforeClaim = await getBalances(stranger); + + expect(status.isFinalized).to.be.true; + expect(claimableEtherBeforeClaim).to.equal(amountWithRewards, "Claimable ether before claim"); + + const claimTx = await withdrawalQueue.connect(stranger).claimWithdrawals([requestId], hints); + const claimTxReceipt = await trace("withdrawalQueue.claimWithdrawals", claimTx); + + const spentGas = claimTxReceipt.gasUsed * claimTxReceipt.gasPrice; + + const claimEvent = ctx.getEvents(claimTxReceipt, "WithdrawalClaimed")[0]; + + expect(claimEvent?.args.toObject()).to.deep.include({ + requestId, + owner: stranger.address, + receiver: stranger.address, + amountOfETH: amountWithRewards, + }, "WithdrawalClaimed event"); + + const transferEvent = ctx.getEvents(claimTxReceipt, "Transfer")[0]; + + expect(transferEvent?.args.toObject()).to.deep.include({ + from: stranger.address, + to: ZeroAddress, + tokenId: requestId, + }, "Transfer event"); + + const balanceAfterClaim = await getBalances(stranger); + + expect(balanceAfterClaim.ETH).to.equal(balanceBeforeClaim.ETH + amountWithRewards - spentGas, "ETH balance after claim"); + + const lockedEtherAmountAfterClaim = await withdrawalQueue.getLockedEtherAmount(); + + log.debug("Locked ether amount", { + "Before withdrawal": ethers.formatEther(lockedEtherAmountBeforeWithdrawal), + "After claim": ethers.formatEther(lockedEtherAmountAfterClaim), + "Amount with rewards": ethers.formatEther(amountWithRewards), + }); + + expect(lockedEtherAmountAfterClaim).to.equal(lockedEtherAmountBeforeWithdrawal - amountWithRewards, "Locked ether amount after 
claim"); + + const [statusAfterClaim] = await withdrawalQueue.connect(stranger).getWithdrawalStatus([requestId]); + + expect(statusAfterClaim.isFinalized).to.be.true; + expect(statusAfterClaim.isClaimed).to.be.true; + + const [claimableEtherAfterClaim] = await withdrawalQueue.getClaimableEther([requestId], hints); + + expect(claimableEtherAfterClaim).to.equal(0, "Claimable ether after claim"); + }); +}); diff --git a/test/suite/index.ts b/test/suite/index.ts index 7a3211f31..36aaa83b1 100644 --- a/test/suite/index.ts +++ b/test/suite/index.ts @@ -1 +1,2 @@ export { Snapshot, resetState } from "./snapshot"; +export { Tracing } from "./tracing"; diff --git a/test/suite/snapshot.ts b/test/suite/snapshot.ts index c977f5e9f..babafc950 100644 --- a/test/suite/snapshot.ts +++ b/test/suite/snapshot.ts @@ -11,11 +11,15 @@ export class Snapshot { public static async restore(snapshot: string) { const result = await Snapshot.provider.send("evm_revert", [snapshot]); - if (!result) throw new Error("`evm_revert` failed."); + if (!result) { + throw new Error("`evm_revert` failed."); + } } public static async refresh(snapshot: string) { - if (snapshot) await Snapshot.restore(snapshot); + if (snapshot) { + await Snapshot.restore(snapshot); + } return Snapshot.take(); } diff --git a/test/suite/tracing.ts b/test/suite/tracing.ts new file mode 100644 index 000000000..eaa5176d6 --- /dev/null +++ b/test/suite/tracing.ts @@ -0,0 +1,15 @@ +import hre from "hardhat"; + +export class Tracing { + public static enable() { + hre.tracer.enabled = true; + } + + public static disable() { + hre.tracer.enabled = false; + } + + get tracer() { + return hre.tracer; + } +} diff --git a/tsconfig.json b/tsconfig.json index b87bf5af1..bfa17d903 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,5 +15,5 @@ } }, "include": ["./test", "./typechain-types", "./lib", "./scripts"], - "files": ["./hardhat.config.ts", "./commitlint.config.ts"] + "files": ["./hardhat.config.ts", "./commitlint.config.ts", "./globals.d.ts"] } diff --git a/yarn.lock b/yarn.lock index b4bd98f05..797760536 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1122,67 +1122,67 @@ __metadata: languageName: node linkType: hard -"@nomicfoundation/edr-darwin-arm64@npm:0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr-darwin-arm64@npm:0.4.0" - checksum: 10c0/4afa3e26fb1f5a2cd7f83859a560319771b652edb881546d908e813394b840bfbe36b5cb24a0b341b5c5846dc457c3215e10bdf67cf8e14aae8e8989c12994d5 +"@nomicfoundation/edr-darwin-arm64@npm:0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr-darwin-arm64@npm:0.5.0" + checksum: 10c0/30a2df0cc2a7bf3e1f1b82537a6efb7dd78648c83a82190617c2961f6a2be720ab954c6664a0043467d59776b8c4492ca337a93fac6405089f90a468c1b37efd languageName: node linkType: hard -"@nomicfoundation/edr-darwin-x64@npm:0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr-darwin-x64@npm:0.4.0" - checksum: 10c0/c6d9ee617279cab3fe60aada04b4a224d6323fb9731a297517468b35400cc8955c9c07c5ea8538b092c00d3d34652e84c53c125926c703e89e685bd9d41bb46e +"@nomicfoundation/edr-darwin-x64@npm:0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr-darwin-x64@npm:0.5.0" + checksum: 10c0/4f16840d5893c2599f625bf3cbe1a54f136d504238738bec840975d28926f12ed0a80ec657b5e59ef060b257d8bb7e9329470e2767021f76a6b71aa775f30ed7 languageName: node linkType: hard -"@nomicfoundation/edr-linux-arm64-gnu@npm:0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr-linux-arm64-gnu@npm:0.4.0" - checksum: 
10c0/b6143aa80ec2d757fe27157c1c9f1de012644fefa97893d0ff4c7a5f9d140006dbb1f78b438a543b42333cc01c2b54c203cb0f7985a5ae4c2f1522c41345cd7c +"@nomicfoundation/edr-linux-arm64-gnu@npm:0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr-linux-arm64-gnu@npm:0.5.0" + checksum: 10c0/f3840e01f41d50eff181341e410e6e6000090e98aae9f14190bf8f1c8d0794b348d5bfe6266c30ea8a2d3011d116dca75788e172c6bcc96645f75d0d751d44f2 languageName: node linkType: hard -"@nomicfoundation/edr-linux-arm64-musl@npm:0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr-linux-arm64-musl@npm:0.4.0" - checksum: 10c0/8b522fcf275b693837d61f8aff7a23771c07ffa754f7ccb0de072f95cc51d0a014be3b6083ee7b643463670a4496fbd5ebeeb90bb1b7967627044011bfb55bee +"@nomicfoundation/edr-linux-arm64-musl@npm:0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr-linux-arm64-musl@npm:0.5.0" + checksum: 10c0/a304b62bca1698123555228cf33fdd5e5be7f310de7e59b44378f4a8afbc2f44f24789c0b0a6957ec7becadc206cfee260a1f9aa9ead2994831e97b7cce40c70 languageName: node linkType: hard -"@nomicfoundation/edr-linux-x64-gnu@npm:0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr-linux-x64-gnu@npm:0.4.0" - checksum: 10c0/0a1fdbbd0e4afb003e970db4443412727a538fe25fa1593411f2cb1208f7ebc9ef615e1842a40e526934ae5fb2a3805d523bee1a32abc82b8cd0b3832648c247 +"@nomicfoundation/edr-linux-x64-gnu@npm:0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr-linux-x64-gnu@npm:0.5.0" + checksum: 10c0/f260ed524d9f0fb62c67732974a856265e8c0efe8e8b3baa9464c347dee87071243551692c544a6a171112f6bbf27d4fddb0e2bb3de6f8b26042d131d0eb74e5 languageName: node linkType: hard -"@nomicfoundation/edr-linux-x64-musl@npm:0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr-linux-x64-musl@npm:0.4.0" - checksum: 10c0/230e3f2e4c8f518e7c31ed1b36321956a433f2ec666043f04dc3de00f61173991ed5c0d7ed432a0ae8a14f075459aa9861506dcb7d7b310bf6e158c6f299f89b +"@nomicfoundation/edr-linux-x64-musl@npm:0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr-linux-x64-musl@npm:0.5.0" + checksum: 10c0/037af5a33e9c420c38eaf80897e1e90b7ce9ca9172c25076afc8056a323cf21cfaf767e54eed6c40297c62eeea51fa02a205b906a0a370cd79c001e808d17aee languageName: node linkType: hard -"@nomicfoundation/edr-win32-x64-msvc@npm:0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr-win32-x64-msvc@npm:0.4.0" - checksum: 10c0/b8e4088d5787779842b82a0f4c0aa81b66d69e5ce7fcbf050b883c74df99bd7d56d963fb3fb9ce6ff9cbfe5123a3fada2e8d521952a90c9b768130dc2e465516 +"@nomicfoundation/edr-win32-x64-msvc@npm:0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr-win32-x64-msvc@npm:0.5.0" + checksum: 10c0/b4ff59e6c776926e154b10895928c8fbb8ca6144ddd324e9a2a71359c15eb5ed53b884902cc0e2515f720472a5a3736ee02a8d2e97fd68fcf1bc4d385c6ddd83 languageName: node linkType: hard -"@nomicfoundation/edr@npm:^0.4.0": - version: 0.4.0 - resolution: "@nomicfoundation/edr@npm:0.4.0" +"@nomicfoundation/edr@npm:^0.5.0": + version: 0.5.0 + resolution: "@nomicfoundation/edr@npm:0.5.0" dependencies: - "@nomicfoundation/edr-darwin-arm64": "npm:0.4.0" - "@nomicfoundation/edr-darwin-x64": "npm:0.4.0" - "@nomicfoundation/edr-linux-arm64-gnu": "npm:0.4.0" - "@nomicfoundation/edr-linux-arm64-musl": "npm:0.4.0" - "@nomicfoundation/edr-linux-x64-gnu": "npm:0.4.0" - "@nomicfoundation/edr-linux-x64-musl": "npm:0.4.0" - "@nomicfoundation/edr-win32-x64-msvc": "npm:0.4.0" - checksum: 10c0/8fa3cdcad7c9525f6c38c0096f33040eaed5f618c75edb958ed6f2220ee07d1f0b5f0b3794834fe551c9b819ae71cfd1140275dc602e0a3350b1130ff10747ec + "@nomicfoundation/edr-darwin-arm64": 
"npm:0.5.0" + "@nomicfoundation/edr-darwin-x64": "npm:0.5.0" + "@nomicfoundation/edr-linux-arm64-gnu": "npm:0.5.0" + "@nomicfoundation/edr-linux-arm64-musl": "npm:0.5.0" + "@nomicfoundation/edr-linux-x64-gnu": "npm:0.5.0" + "@nomicfoundation/edr-linux-x64-musl": "npm:0.5.0" + "@nomicfoundation/edr-win32-x64-msvc": "npm:0.5.0" + checksum: 10c0/623eb30538789290c9c99034a21d40531775cbaae43fc13bb7d0735185becca4146595724dfa3fb183fc146ce091650c454d51684b30d5358e17d3619abad240 languageName: node linkType: hard @@ -1266,25 +1266,25 @@ __metadata: languageName: node linkType: hard -"@nomicfoundation/hardhat-ignition-ethers@npm:^0.15.4": - version: 0.15.4 - resolution: "@nomicfoundation/hardhat-ignition-ethers@npm:0.15.4" +"@nomicfoundation/hardhat-ignition-ethers@npm:^0.15.5": + version: 0.15.5 + resolution: "@nomicfoundation/hardhat-ignition-ethers@npm:0.15.5" peerDependencies: "@nomicfoundation/hardhat-ethers": ^3.0.4 - "@nomicfoundation/hardhat-ignition": ^0.15.4 - "@nomicfoundation/ignition-core": ^0.15.4 + "@nomicfoundation/hardhat-ignition": ^0.15.5 + "@nomicfoundation/ignition-core": ^0.15.5 ethers: ^6.7.0 hardhat: ^2.18.0 - checksum: 10c0/aa172c985a326852d2304c125ed47ade7a4057311df0f2271b0623fbfb532baa0cea8d38eac1464ed20b81906ac39db1cf13317c213118fcce34936804a9efcc + checksum: 10c0/19f0e029a580dd4d27048f1e87f8111532684cf7f0a2b5c8d6ae8d811ff489629305e3a616cb89702421142c7c628f1efa389781414de1279689018c463cce60 languageName: node linkType: hard -"@nomicfoundation/hardhat-ignition@npm:^0.15.4": - version: 0.15.4 - resolution: "@nomicfoundation/hardhat-ignition@npm:0.15.4" +"@nomicfoundation/hardhat-ignition@npm:^0.15.5": + version: 0.15.5 + resolution: "@nomicfoundation/hardhat-ignition@npm:0.15.5" dependencies: - "@nomicfoundation/ignition-core": "npm:^0.15.4" - "@nomicfoundation/ignition-ui": "npm:^0.15.4" + "@nomicfoundation/ignition-core": "npm:^0.15.5" + "@nomicfoundation/ignition-ui": "npm:^0.15.5" chalk: "npm:^4.0.0" debug: "npm:^4.3.2" fs-extra: "npm:^10.0.0" @@ -1292,7 +1292,7 @@ __metadata: peerDependencies: "@nomicfoundation/hardhat-verify": ^2.0.1 hardhat: ^2.18.0 - checksum: 10c0/82d2c2e001a736aa70a010f4d56da888d4b3da42237a8fb40a9027208b5963be6b8f1f7db5f719ef6770e35e9aca63e9c038820cc2f0ff2ef90c259fc3010ba7 + checksum: 10c0/b3d9755f2bf89157b6ae0cb6cebea264f76f556ae0b3fc5a62afb5e0f6ed70b3d82d8f692b1c49b2ef2d60cdb45ee28fb148cfca1aa5a53bfe37772c71e75a08 languageName: node linkType: hard @@ -1333,9 +1333,9 @@ __metadata: languageName: node linkType: hard -"@nomicfoundation/hardhat-verify@npm:^2.0.8": - version: 2.0.8 - resolution: "@nomicfoundation/hardhat-verify@npm:2.0.8" +"@nomicfoundation/hardhat-verify@npm:^2.0.9": + version: 2.0.9 + resolution: "@nomicfoundation/hardhat-verify@npm:2.0.9" dependencies: "@ethersproject/abi": "npm:^5.1.2" "@ethersproject/address": "npm:^5.0.2" @@ -1347,14 +1347,14 @@ __metadata: table: "npm:^6.8.0" undici: "npm:^5.14.0" peerDependencies: - hardhat: ^2.0.4 - checksum: 10c0/1f517800b466580098b7ba4b6786d4c8018d9023b9b7dd197971a16903ff66e66256913341ae1586bf7d9184de25ac83e5e1115f18490de30a7f157a10804523 + hardhat: "*" + checksum: 10c0/e70a060dbc657f77c0b55a35ab1707720b2eb7aba96f28084a37fbe82fc41ae54d89f148144af366802112feb6a09823ddd4607cd73af2c497a0035f9679b290 languageName: node linkType: hard -"@nomicfoundation/ignition-core@npm:^0.15.4": - version: 0.15.4 - resolution: "@nomicfoundation/ignition-core@npm:0.15.4" +"@nomicfoundation/ignition-core@npm:^0.15.5": + version: 0.15.5 + resolution: "@nomicfoundation/ignition-core@npm:0.15.5" dependencies: 
"@ethersproject/address": "npm:5.6.1" "@nomicfoundation/solidity-analyzer": "npm:^0.1.1" @@ -1365,14 +1365,14 @@ __metadata: immer: "npm:10.0.2" lodash: "npm:4.17.21" ndjson: "npm:2.0.0" - checksum: 10c0/be9e1e8a4fe145f6d137a288019c9f4aa3a1f9ee92f3b18e8c026e7eed9badc8cf6817916c70dff70f540c5938c340cb78a1d2809913afe67fd93a4e5d6f3983 + checksum: 10c0/ff14724d8e992dc54291da6e6a864f6b3db268b6725d0af6ecbf3f81ed65f6824441421b23129d118cd772efc8ab0275d1decf203019cb3049a48b37f9c15432 languageName: node linkType: hard -"@nomicfoundation/ignition-ui@npm:^0.15.4": - version: 0.15.4 - resolution: "@nomicfoundation/ignition-ui@npm:0.15.4" - checksum: 10c0/e2b8d70ef75e1ccddaf4087534884de3b1b1ee35acbbf59111f729842e7663654e7b042aa0abefdcb6460fd7ffec22c89212279198e698b13e3b771072f4a581 +"@nomicfoundation/ignition-ui@npm:^0.15.5": + version: 0.15.5 + resolution: "@nomicfoundation/ignition-ui@npm:0.15.5" + checksum: 10c0/7d10e30c3078731e4feb91bd7959dfb5a0eeac6f34f6261fada2bf330ff8057ecd576ce0fb3fe856867af2d7c67f31bd75a896110b58d93ff3f27f04f6771278 languageName: node linkType: hard @@ -2012,10 +2012,10 @@ __metadata: languageName: node linkType: hard -"@types/mocha@npm:10.0.6": - version: 10.0.6 - resolution: "@types/mocha@npm:10.0.6" - checksum: 10c0/4526c9e88388f9e1004c6d3937c5488a39908810f26b927173c58d52b43057f3895627dc06538e96706e08b88158885f869ec6311f6b58fd72bdef715f26d6c3 +"@types/mocha@npm:10.0.7": + version: 10.0.7 + resolution: "@types/mocha@npm:10.0.7" + checksum: 10c0/48a2df4dd02b6e66a11129dca6a23cf0cc3995faf8525286eb851043685bd8b7444780f4bb29a1c42df7559ed63294e5308bfce3a6b862ad2e0359cb21c21329 languageName: node linkType: hard @@ -2035,12 +2035,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:20.14.2": - version: 20.14.2 - resolution: "@types/node@npm:20.14.2" +"@types/node@npm:20.14.13": + version: 20.14.13 + resolution: "@types/node@npm:20.14.13" dependencies: undici-types: "npm:~5.26.4" - checksum: 10c0/2d86e5f2227aaa42212e82ea0affe72799111b888ff900916376450b02b09b963ca888b20d9c332d8d2b833ed4781987867a38eaa2e4863fa8439071468b0a6f + checksum: 10c0/10bb3ece675308742301c652ab8c6cb88b1ebddebed22316103c58f94fe7eff131edd5f679e487c19077fadb6b5e6b1ad9a60a2cee2869aa1f20452b9761d570 languageName: node linkType: hard @@ -2090,15 +2090,15 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/eslint-plugin@npm:^7.12.0": - version: 7.12.0 - resolution: "@typescript-eslint/eslint-plugin@npm:7.12.0" +"@typescript-eslint/eslint-plugin@npm:^7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/eslint-plugin@npm:7.18.0" dependencies: "@eslint-community/regexpp": "npm:^4.10.0" - "@typescript-eslint/scope-manager": "npm:7.12.0" - "@typescript-eslint/type-utils": "npm:7.12.0" - "@typescript-eslint/utils": "npm:7.12.0" - "@typescript-eslint/visitor-keys": "npm:7.12.0" + "@typescript-eslint/scope-manager": "npm:7.18.0" + "@typescript-eslint/type-utils": "npm:7.18.0" + "@typescript-eslint/utils": "npm:7.18.0" + "@typescript-eslint/visitor-keys": "npm:7.18.0" graphemer: "npm:^1.4.0" ignore: "npm:^5.3.1" natural-compare: "npm:^1.4.0" @@ -2109,44 +2109,44 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 10c0/abf899e07144e8edd8ae010d25e4679e2acded407a10efc6aaa7ee325af8daf0dd149946ad58e46982e29e0a23f56b1e0dd461ef09aab09b0d94fc24ffc827c2 + checksum: 10c0/2b37948fa1b0dab77138909dabef242a4d49ab93e4019d4ef930626f0a7d96b03e696cd027fa0087881c20e73be7be77c942606b4a76fa599e6b37f6985304c3 languageName: node linkType: hard -"@typescript-eslint/parser@npm:^7.12.0": - 
version: 7.12.0 - resolution: "@typescript-eslint/parser@npm:7.12.0" +"@typescript-eslint/parser@npm:^7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/parser@npm:7.18.0" dependencies: - "@typescript-eslint/scope-manager": "npm:7.12.0" - "@typescript-eslint/types": "npm:7.12.0" - "@typescript-eslint/typescript-estree": "npm:7.12.0" - "@typescript-eslint/visitor-keys": "npm:7.12.0" + "@typescript-eslint/scope-manager": "npm:7.18.0" + "@typescript-eslint/types": "npm:7.18.0" + "@typescript-eslint/typescript-estree": "npm:7.18.0" + "@typescript-eslint/visitor-keys": "npm:7.18.0" debug: "npm:^4.3.4" peerDependencies: eslint: ^8.56.0 peerDependenciesMeta: typescript: optional: true - checksum: 10c0/223c32a6ba6cee770ee39108fb0a6d132283673d44c751bec85d8792df3382ddb839617787d183dc8fd7686d8a2018bf1ec0f3d63b7010c4370913f249c80fbc + checksum: 10c0/370e73fca4278091bc1b657f85e7d74cd52b24257ea20c927a8e17546107ce04fbf313fec99aed0cc2a145ddbae1d3b12e9cc2c1320117636dc1281bcfd08059 languageName: node linkType: hard -"@typescript-eslint/scope-manager@npm:7.12.0": - version: 7.12.0 - resolution: "@typescript-eslint/scope-manager@npm:7.12.0" +"@typescript-eslint/scope-manager@npm:7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/scope-manager@npm:7.18.0" dependencies: - "@typescript-eslint/types": "npm:7.12.0" - "@typescript-eslint/visitor-keys": "npm:7.12.0" - checksum: 10c0/7af53cd9045cc70459e4f451377affc0ef03e67bd743480ab2cbfebe1b7d8269fc639406966930c5abb26f1b633623c98442c2b60f6257e0ce1555439343d5e9 + "@typescript-eslint/types": "npm:7.18.0" + "@typescript-eslint/visitor-keys": "npm:7.18.0" + checksum: 10c0/038cd58c2271de146b3a594afe2c99290034033326d57ff1f902976022c8b0138ffd3cb893ae439ae41003b5e4bcc00cabf6b244ce40e8668f9412cc96d97b8e languageName: node linkType: hard -"@typescript-eslint/type-utils@npm:7.12.0": - version: 7.12.0 - resolution: "@typescript-eslint/type-utils@npm:7.12.0" +"@typescript-eslint/type-utils@npm:7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/type-utils@npm:7.18.0" dependencies: - "@typescript-eslint/typescript-estree": "npm:7.12.0" - "@typescript-eslint/utils": "npm:7.12.0" + "@typescript-eslint/typescript-estree": "npm:7.18.0" + "@typescript-eslint/utils": "npm:7.18.0" debug: "npm:^4.3.4" ts-api-utils: "npm:^1.3.0" peerDependencies: @@ -2154,23 +2154,23 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 10c0/41f4aa20d24724b461eb0cdac69d91ef60c2b628fb4a5739e4dbb8378aa4a7ff20c302f60e5d74ce75d5b99fcd3e3d71b9b3c96a1714aac47ce2ce5d6d611fcd + checksum: 10c0/ad92a38007be620f3f7036f10e234abdc2fdc518787b5a7227e55fd12896dacf56e8b34578723fbf9bea8128df2510ba8eb6739439a3879eda9519476d5783fd languageName: node linkType: hard -"@typescript-eslint/types@npm:7.12.0": - version: 7.12.0 - resolution: "@typescript-eslint/types@npm:7.12.0" - checksum: 10c0/76786d02a0838750d74ad6e49b026875c0753b81c5a46a56525a1e82d89c0939a13434b03494e3b31b7ffbba7824f426c5b502a12337806a1f6ca560b5dad46c +"@typescript-eslint/types@npm:7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/types@npm:7.18.0" + checksum: 10c0/eb7371ac55ca77db8e59ba0310b41a74523f17e06f485a0ef819491bc3dd8909bb930120ff7d30aaf54e888167e0005aa1337011f3663dc90fb19203ce478054 languageName: node linkType: hard -"@typescript-eslint/typescript-estree@npm:7.12.0": - version: 7.12.0 - resolution: "@typescript-eslint/typescript-estree@npm:7.12.0" +"@typescript-eslint/typescript-estree@npm:7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/typescript-estree@npm:7.18.0" dependencies: 
- "@typescript-eslint/types": "npm:7.12.0" - "@typescript-eslint/visitor-keys": "npm:7.12.0" + "@typescript-eslint/types": "npm:7.18.0" + "@typescript-eslint/visitor-keys": "npm:7.18.0" debug: "npm:^4.3.4" globby: "npm:^11.1.0" is-glob: "npm:^4.0.3" @@ -2180,31 +2180,31 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 10c0/855be5ba6c3d7540319ad250555055a798deb04855f26abe719a3b8d555a3227d52e09453930bd829e260a72f65a985998b235514ce2872b31615015da3163c0 + checksum: 10c0/0c7f109a2e460ec8a1524339479cf78ff17814d23c83aa5112c77fb345e87b3642616291908dcddea1e671da63686403dfb712e4a4435104f92abdfddf9aba81 languageName: node linkType: hard -"@typescript-eslint/utils@npm:7.12.0": - version: 7.12.0 - resolution: "@typescript-eslint/utils@npm:7.12.0" +"@typescript-eslint/utils@npm:7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/utils@npm:7.18.0" dependencies: "@eslint-community/eslint-utils": "npm:^4.4.0" - "@typescript-eslint/scope-manager": "npm:7.12.0" - "@typescript-eslint/types": "npm:7.12.0" - "@typescript-eslint/typescript-estree": "npm:7.12.0" + "@typescript-eslint/scope-manager": "npm:7.18.0" + "@typescript-eslint/types": "npm:7.18.0" + "@typescript-eslint/typescript-estree": "npm:7.18.0" peerDependencies: eslint: ^8.56.0 - checksum: 10c0/04241c0313f2d061bc81ec2d5d589c9a723f8c1493e5b83d98f804ff9dac23c5e7157d9bb57bee8b458f40824f56ea65a02ebd344926a37cb58bf151cb4d3bf2 + checksum: 10c0/a25a6d50eb45c514469a01ff01f215115a4725fb18401055a847ddf20d1b681409c4027f349033a95c4ff7138d28c3b0a70253dfe8262eb732df4b87c547bd1e languageName: node linkType: hard -"@typescript-eslint/visitor-keys@npm:7.12.0": - version: 7.12.0 - resolution: "@typescript-eslint/visitor-keys@npm:7.12.0" +"@typescript-eslint/visitor-keys@npm:7.18.0": + version: 7.18.0 + resolution: "@typescript-eslint/visitor-keys@npm:7.18.0" dependencies: - "@typescript-eslint/types": "npm:7.12.0" + "@typescript-eslint/types": "npm:7.18.0" eslint-visitor-keys: "npm:^3.4.3" - checksum: 10c0/f3aa6704961e65fa8d66fcde57cd28e382412bb8bec2e99312bf8cda38772ae9a74d6d95b9765f76a249bc9ab65624db34b8c00078ebad129b2e1b624e935d90 + checksum: 10c0/538b645f8ff1d9debf264865c69a317074eaff0255e63d7407046176b0f6a6beba34a6c51d511f12444bae12a98c69891eb6f403c9f54c6c2e2849d1c1cb73c0 languageName: node linkType: hard @@ -3778,9 +3778,9 @@ __metadata: languageName: node linkType: hard -"chai@npm:^4.4.1": - version: 4.4.1 - resolution: "chai@npm:4.4.1" +"chai@npm:^4.5.0": + version: 4.5.0 + resolution: "chai@npm:4.5.0" dependencies: assertion-error: "npm:^1.1.0" check-error: "npm:^1.0.3" @@ -3788,8 +3788,8 @@ __metadata: get-func-name: "npm:^2.0.2" loupe: "npm:^2.3.6" pathval: "npm:^1.1.1" - type-detect: "npm:^4.0.8" - checksum: 10c0/91590a8fe18bd6235dece04ccb2d5b4ecec49984b50924499bdcd7a95c02cb1fd2a689407c19bb854497bde534ef57525cfad6c7fdd2507100fd802fbc2aefbd + type-detect: "npm:^4.1.0" + checksum: 10c0/b8cb596bd1aece1aec659e41a6e479290c7d9bee5b3ad63d2898ad230064e5b47889a3bc367b20100a0853b62e026e2dc514acf25a3c9385f936aa3614d4ab4d languageName: node linkType: hard @@ -4114,13 +4114,6 @@ __metadata: languageName: node linkType: hard -"commander@npm:3.0.2": - version: 3.0.2 - resolution: "commander@npm:3.0.2" - checksum: 10c0/8a279b4bacde68f03664086260ccb623122d2bdae6f380a41c9e06b646e830372c30a4b88261238550e0ad69d53f7af8883cb705d8237fdd22947e84913b149c - languageName: node - linkType: hard - "commander@npm:^10.0.0": version: 10.0.1 resolution: "commander@npm:10.0.1" @@ -4128,6 +4121,13 @@ __metadata: languageName: node linkType: hard 
+"commander@npm:^8.1.0": + version: 8.3.0 + resolution: "commander@npm:8.3.0" + checksum: 10c0/8b043bb8322ea1c39664a1598a95e0495bfe4ca2fad0d84a92d7d1d8d213e2a155b441d2470c8e08de7c4a28cf2bc6e169211c49e1b21d9f7edc6ae4d9356060 + languageName: node + linkType: hard + "commander@npm:~12.1.0": version: 12.1.0 resolution: "commander@npm:12.1.0" @@ -4970,12 +4970,12 @@ __metadata: languageName: node linkType: hard -"eslint-plugin-simple-import-sort@npm:12.1.0": - version: 12.1.0 - resolution: "eslint-plugin-simple-import-sort@npm:12.1.0" +"eslint-plugin-simple-import-sort@npm:12.1.1": + version: 12.1.1 + resolution: "eslint-plugin-simple-import-sort@npm:12.1.1" peerDependencies: eslint: ">=5.0.0" - checksum: 10c0/11e963683216e190b09bb6834b6978ca71d438d9413c52495e92493b0a68fc10268d7fd5815814496ab02fe7c018e4d5fd82866bf3ed5f95cff69628ca741102 + checksum: 10c0/0ad1907ad9ddbadd1db655db0a9d0b77076e274b793a77b982c8525d808d868e6ecfce24f3a411e8a1fa551077387f9ebb38c00956073970ebd7ee6a029ce2b3 languageName: node linkType: hard @@ -5543,9 +5543,9 @@ __metadata: languageName: node linkType: hard -"ethers@npm:^6.13.0": - version: 6.13.0 - resolution: "ethers@npm:6.13.0" +"ethers@npm:^6.13.2": + version: 6.13.2 + resolution: "ethers@npm:6.13.2" dependencies: "@adraffy/ens-normalize": "npm:1.10.1" "@noble/curves": "npm:1.2.0" @@ -5553,8 +5553,8 @@ __metadata: "@types/node": "npm:18.15.13" aes-js: "npm:4.0.0-beta.5" tslib: "npm:2.4.0" - ws: "npm:8.5.0" - checksum: 10c0/53865383d2c6d5ab23b23853a169a62165fc4631da6e48967063a763b2419637c62d89bf8b024c0e1b2feb40065e05c64b44ccf47b195de5e853b447f8fad77d + ws: "npm:8.17.1" + checksum: 10c0/5956389a180992f8b6d90bc21b2e0f28619a098513d3aeb7a350a0b7c5852d635a9d7fd4ced1af50c985dd88398716f66dfd4a2de96c5c3a67150b93543d92af languageName: node linkType: hard @@ -6245,18 +6245,19 @@ __metadata: languageName: node linkType: hard -"glob@npm:^10.4.1": - version: 10.4.1 - resolution: "glob@npm:10.4.1" +"glob@npm:^10.4.5": + version: 10.4.5 + resolution: "glob@npm:10.4.5" dependencies: foreground-child: "npm:^3.1.0" jackspeak: "npm:^3.1.2" minimatch: "npm:^9.0.4" minipass: "npm:^7.1.2" + package-json-from-dist: "npm:^1.0.0" path-scurry: "npm:^1.11.1" bin: glob: dist/esm/bin.mjs - checksum: 10c0/77f2900ed98b9cc2a0e1901ee5e476d664dae3cd0f1b662b8bfd4ccf00d0edc31a11595807706a274ca10e1e251411bbf2e8e976c82bed0d879a9b89343ed379 + checksum: 10c0/19a9759ea77b8e3ca0a43c2f07ecddc2ad46216b786bb8f993c445aee80d345925a21e5280c7b7c6c59e860a0154b84e4b2b60321fea92cd3c56b4a7489f160e languageName: node linkType: hard @@ -6503,9 +6504,9 @@ __metadata: languageName: node linkType: hard -"hardhat-tracer@npm:3.0.1": - version: 3.0.1 - resolution: "hardhat-tracer@npm:3.0.1" +"hardhat-tracer@npm:3.0.3": + version: 3.0.3 + resolution: "hardhat-tracer@npm:3.0.3" dependencies: chalk: "npm:^4.1.2" debug: "npm:^4.3.4" @@ -6514,7 +6515,7 @@ __metadata: peerDependencies: chai: 4.x hardhat: ">=2.22.5 <3.x" - checksum: 10c0/a03b871a6beed23b006cdfccc7326cecf0a596f319502da668cf63701d2adc2c0e1e8140e596e75714ca5aa091e45e1674f3eb3cc4d34eba2ab20a8ad554fbe0 + checksum: 10c0/af692b4ed29811ffd77259f52104a77141243d42e830034229608f61b40dc46137222ec304ec7c6bf99f9e38ef3f329d40320d97decf8c2ecd7c23c3f1406637 languageName: node linkType: hard @@ -6529,13 +6530,13 @@ __metadata: languageName: node linkType: hard -"hardhat@npm:^2.22.5": - version: 2.22.5 - resolution: "hardhat@npm:2.22.5" +"hardhat@npm:^2.22.7": + version: 2.22.7 + resolution: "hardhat@npm:2.22.7" dependencies: "@ethersproject/abi": "npm:^5.1.2" "@metamask/eth-sig-util": 
"npm:^4.0.0" - "@nomicfoundation/edr": "npm:^0.4.0" + "@nomicfoundation/edr": "npm:^0.5.0" "@nomicfoundation/ethereumjs-common": "npm:4.0.4" "@nomicfoundation/ethereumjs-tx": "npm:5.0.4" "@nomicfoundation/ethereumjs-util": "npm:9.0.4" @@ -6569,7 +6570,7 @@ __metadata: raw-body: "npm:^2.4.1" resolve: "npm:1.17.0" semver: "npm:^6.3.0" - solc: "npm:0.7.3" + solc: "npm:0.8.26" source-map-support: "npm:^0.5.13" stacktrace-parser: "npm:^0.1.10" tsort: "npm:0.0.1" @@ -6586,7 +6587,7 @@ __metadata: optional: true bin: hardhat: internal/cli/bootstrap.js - checksum: 10c0/5a714fa3d29cc875e80b894a52337ed3d4aec7aafd19648da0de1dfc99fdaec96a18a6bf6ca759cb0b5bb62f6a43bd762ea4895cc2ab3107e665719511dc0282 + checksum: 10c0/b3dc6bd5b77b6d229aa7e143581d31b867155998243028936464148315cbf03a07bac5536bd9d13f6028817c8e0ec11f934a27566c9d952bea6ae9521372dd6d languageName: node linkType: hard @@ -6853,12 +6854,12 @@ __metadata: languageName: node linkType: hard -"husky@npm:^9.0.11": - version: 9.0.11 - resolution: "husky@npm:9.0.11" +"husky@npm:^9.1.4": + version: 9.1.4 + resolution: "husky@npm:9.1.4" bin: - husky: bin.mjs - checksum: 10c0/2c787dcf74a837fc9a4fea7da907509d4bd9a289f4ea10ecc9d86279e4d4542b0f5f6443a619bccae19e265f2677172cc2b86aae5c932a35a330cc227d914605 + husky: bin.js + checksum: 10c0/f5185003bef9ad9ec3f40e821963e4c12409b993fdcab89e3d660bed7d8c9d8bfd399f05222e27e0ead6589601fb1bb08d1a589c51751a4ab0547ead3429b8de languageName: node linkType: hard @@ -7859,49 +7860,49 @@ __metadata: "@commitlint/config-conventional": "npm:^19.2.2" "@nomicfoundation/hardhat-chai-matchers": "npm:^2.0.7" "@nomicfoundation/hardhat-ethers": "npm:^3.0.6" - "@nomicfoundation/hardhat-ignition": "npm:^0.15.4" - "@nomicfoundation/hardhat-ignition-ethers": "npm:^0.15.4" + "@nomicfoundation/hardhat-ignition": "npm:^0.15.5" + "@nomicfoundation/hardhat-ignition-ethers": "npm:^0.15.5" "@nomicfoundation/hardhat-network-helpers": "npm:^1.0.11" "@nomicfoundation/hardhat-toolbox": "npm:^5.0.0" - "@nomicfoundation/hardhat-verify": "npm:^2.0.8" - "@nomicfoundation/ignition-core": "npm:^0.15.4" + "@nomicfoundation/hardhat-verify": "npm:^2.0.9" + "@nomicfoundation/ignition-core": "npm:^0.15.5" "@openzeppelin/contracts": "npm:3.4.0" "@openzeppelin/contracts-v4.4": "npm:@openzeppelin/contracts@4.4.1" "@typechain/ethers-v6": "npm:^0.5.1" "@typechain/hardhat": "npm:^9.1.0" "@types/chai": "npm:^4.3.16" - "@types/mocha": "npm:10.0.6" - "@types/node": "npm:20.14.2" - "@typescript-eslint/eslint-plugin": "npm:^7.12.0" - "@typescript-eslint/parser": "npm:^7.12.0" + "@types/mocha": "npm:10.0.7" + "@types/node": "npm:20.14.13" + "@typescript-eslint/eslint-plugin": "npm:^7.18.0" + "@typescript-eslint/parser": "npm:^7.18.0" bigint-conversion: "npm:^2.4.3" - chai: "npm:^4.4.1" + chai: "npm:^4.5.0" chalk: "npm:^4.1.2" dotenv: "npm:^16.4.5" eslint: "npm:^8.57.0" eslint-config-prettier: "npm:^9.1.0" eslint-plugin-no-only-tests: "npm:^3.1.0" - eslint-plugin-simple-import-sort: "npm:12.1.0" + eslint-plugin-simple-import-sort: "npm:12.1.1" ethereumjs-util: "npm:^7.1.5" - ethers: "npm:^6.13.0" - glob: "npm:^10.4.1" - hardhat: "npm:^2.22.5" + ethers: "npm:^6.13.2" + glob: "npm:^10.4.5" + hardhat: "npm:^2.22.7" hardhat-contract-sizer: "npm:^2.10.0" hardhat-gas-reporter: "npm:^1.0.10" hardhat-ignore-warnings: "npm:^0.2.11" - hardhat-tracer: "npm:3.0.1" + hardhat-tracer: "npm:3.0.3" hardhat-watcher: "npm:2.5.0" - husky: "npm:^9.0.11" - lint-staged: "npm:^15.2.5" + husky: "npm:^9.1.4" + lint-staged: "npm:^15.2.7" openzeppelin-solidity: "npm:2.0.0" - prettier: "npm:^3.3.1" 
- solhint: "npm:^5.0.1" + prettier: "npm:^3.3.3" + solhint: "npm:^5.0.2" solhint-plugin-lido: "npm:^0.0.4" solidity-coverage: "npm:^0.8.12" ts-node: "npm:^10.9.2" tsconfig-paths: "npm:^4.2.0" typechain: "npm:^8.3.2" - typescript: "npm:^5.4.5" + typescript: "npm:^5.5.4" languageName: unknown linkType: soft @@ -7919,9 +7920,9 @@ __metadata: languageName: node linkType: hard -"lint-staged@npm:^15.2.5": - version: 15.2.5 - resolution: "lint-staged@npm:15.2.5" +"lint-staged@npm:^15.2.7": + version: 15.2.7 + resolution: "lint-staged@npm:15.2.7" dependencies: chalk: "npm:~5.3.0" commander: "npm:~12.1.0" @@ -7935,7 +7936,7 @@ __metadata: yaml: "npm:~2.4.2" bin: lint-staged: bin/lint-staged.js - checksum: 10c0/89c54489783510f86df15756659facade82e849c0cbfb564fe047b82be91c5d2b1b5608a4bfc5237bd7b9fd0e1206e66aa3e4f8cad3ac51e37a098b8492c2fa6 + checksum: 10c0/c14399f9782ae222a1748144254f24b5b9afc816dc8840bd02d50f523c6582796ff18410767eb1a73cf1a83bc6e492dea7b1c4f0912bf3e434c068221f13c878 languageName: node linkType: hard @@ -9076,6 +9077,13 @@ __metadata: languageName: node linkType: hard +"package-json-from-dist@npm:^1.0.0": + version: 1.0.0 + resolution: "package-json-from-dist@npm:1.0.0" + checksum: 10c0/e3ffaf6ac1040ab6082a658230c041ad14e72fabe99076a2081bb1d5d41210f11872403fc09082daf4387fc0baa6577f96c9c0e94c90c394fd57794b66aa4033 + languageName: node + linkType: hard + "package-json@npm:^8.1.0": version: 8.1.1 resolution: "package-json@npm:8.1.1" @@ -9359,12 +9367,12 @@ __metadata: languageName: node linkType: hard -"prettier@npm:^3.3.1": - version: 3.3.1 - resolution: "prettier@npm:3.3.1" +"prettier@npm:^3.3.3": + version: 3.3.3 + resolution: "prettier@npm:3.3.3" bin: prettier: bin/prettier.cjs - checksum: 10c0/c25a709c9f0be670dc6bcb190b622347e1dbeb6c3e7df8b0711724cb64d8647c60b839937a4df4df18e9cfb556c2b08ca9d24d9645eb5488a7fc032a2c4d5cb3 + checksum: 10c0/b85828b08e7505716324e4245549b9205c0cacb25342a030ba8885aba2039a115dbcf75a0b7ca3b37bc9d101ee61fab8113fc69ca3359f2a226f1ecc07ad2e26 languageName: node linkType: hard @@ -9807,7 +9815,7 @@ __metadata: languageName: node linkType: hard -"require-from-string@npm:^2.0.0, require-from-string@npm:^2.0.2": +"require-from-string@npm:^2.0.2": version: 2.0.2 resolution: "require-from-string@npm:2.0.2" checksum: 10c0/aaa267e0c5b022fc5fd4eef49d8285086b15f2a1c54b28240fdf03599cbd9c26049fee3eab894f2e1f6ca65e513b030a7c264201e3f005601e80c49fb2937ce2 @@ -10392,22 +10400,20 @@ __metadata: languageName: node linkType: hard -"solc@npm:0.7.3": - version: 0.7.3 - resolution: "solc@npm:0.7.3" +"solc@npm:0.8.26": + version: 0.8.26 + resolution: "solc@npm:0.8.26" dependencies: command-exists: "npm:^1.2.8" - commander: "npm:3.0.2" + commander: "npm:^8.1.0" follow-redirects: "npm:^1.12.1" - fs-extra: "npm:^0.30.0" js-sha3: "npm:0.8.0" memorystream: "npm:^0.3.1" - require-from-string: "npm:^2.0.0" semver: "npm:^5.5.0" tmp: "npm:0.0.33" bin: - solcjs: solcjs - checksum: 10c0/28405adfba1f55603dc5b674630383bfbdbfab2d36deba2ff0a90c46cbc346bcabf0ed6175e12ae3c0b751ef082d0405ab42dcc24f88603a446e097a925d7425 + solcjs: solc.js + checksum: 10c0/1eea35da99c228d0dc1d831c29f7819e7921b67824c889a5e5f2e471a2ef5856a15fabc0b5de067f5ba994fa36fb5a563361963646fe98dad58a0e4fa17c8b2d languageName: node linkType: hard @@ -10433,9 +10439,9 @@ __metadata: languageName: node linkType: hard -"solhint@npm:^5.0.1": - version: 5.0.1 - resolution: "solhint@npm:5.0.1" +"solhint@npm:^5.0.2": + version: 5.0.2 + resolution: "solhint@npm:5.0.2" dependencies: "@solidity-parser/parser": "npm:^0.18.0" ajv: "npm:^6.12.6" 
@@ -10461,7 +10467,7 @@ __metadata: optional: true bin: solhint: solhint.js - checksum: 10c0/0da4ce2aca4fcd2bb74a7d656f359940cb5c7059101db7d5f25ef31708763a8e8362480c20ee8dd525ded6ec897b328aa0e9b86b79ffaa59585c5c59722612dc + checksum: 10c0/4fec845ec6b8bebc7bee5abef42b40474d0e162a31d21ee10d98e4cb7db1f1fcdd338b927a880d7659fd336cc738737709d86af174fb388a08bcc4f50e3a2623 languageName: node linkType: hard @@ -11402,13 +11408,20 @@ __metadata: languageName: node linkType: hard -"type-detect@npm:^4.0.0, type-detect@npm:^4.0.8": +"type-detect@npm:^4.0.0": version: 4.0.8 resolution: "type-detect@npm:4.0.8" checksum: 10c0/8fb9a51d3f365a7de84ab7f73b653534b61b622aa6800aecdb0f1095a4a646d3f5eb295322127b6573db7982afcd40ab492d038cf825a42093a58b1e1353e0bd languageName: node linkType: hard +"type-detect@npm:^4.1.0": + version: 4.1.0 + resolution: "type-detect@npm:4.1.0" + checksum: 10c0/df8157ca3f5d311edc22885abc134e18ff8ffbc93d6a9848af5b682730ca6a5a44499259750197250479c5331a8a75b5537529df5ec410622041650a7f293e2a + languageName: node + linkType: hard + "type-fest@npm:^0.20.2": version: 0.20.2 resolution: "type-fest@npm:0.20.2" @@ -11511,23 +11524,23 @@ __metadata: languageName: node linkType: hard -"typescript@npm:^5.4.5": - version: 5.4.5 - resolution: "typescript@npm:5.4.5" +"typescript@npm:^5.5.4": + version: 5.5.4 + resolution: "typescript@npm:5.5.4" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: 10c0/2954022ada340fd3d6a9e2b8e534f65d57c92d5f3989a263754a78aba549f7e6529acc1921913560a4b816c46dce7df4a4d29f9f11a3dc0d4213bb76d043251e + checksum: 10c0/422be60f89e661eab29ac488c974b6cc0a660fb2228003b297c3d10c32c90f3bcffc1009b43876a082515a3c376b1eefcce823d6e78982e6878408b9a923199c languageName: node linkType: hard -"typescript@patch:typescript@npm%3A^5.4.5#optional!builtin": - version: 5.4.5 - resolution: "typescript@patch:typescript@npm%3A5.4.5#optional!builtin::version=5.4.5&hash=5adc0c" +"typescript@patch:typescript@npm%3A^5.5.4#optional!builtin": + version: 5.5.4 + resolution: "typescript@patch:typescript@npm%3A5.5.4#optional!builtin::version=5.5.4&hash=379a07" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: 10c0/db2ad2a16ca829f50427eeb1da155e7a45e598eec7b086d8b4e8ba44e5a235f758e606d681c66992230d3fc3b8995865e5fd0b22a2c95486d0b3200f83072ec9 + checksum: 10c0/73409d7b9196a5a1217b3aaad929bf76294d3ce7d6e9766dd880ece296ee91cf7d7db6b16c6c6c630ee5096eccde726c0ef17c7dfa52b01a243e57ae1f09ef07 languageName: node linkType: hard @@ -12057,6 +12070,21 @@ __metadata: languageName: node linkType: hard +"ws@npm:8.17.1": + version: 8.17.1 + resolution: "ws@npm:8.17.1" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: 10c0/f4a49064afae4500be772abdc2211c8518f39e1c959640457dcee15d4488628620625c783902a52af2dd02f68558da2868fd06e6fd0e67ebcd09e6881b1b5bfe + languageName: node + linkType: hard + "ws@npm:8.5.0": version: 8.5.0 resolution: "ws@npm:8.5.0"