diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a17b36942e..43202ba8ed 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -29,7 +29,6 @@ ## Notifications Team /packages/notification-services-controller @MetaMask/notifications -/packages/profile-sync-controller @MetaMask/notifications ## Product Safety Team /packages/phishing-controller @MetaMask/product-safety @@ -60,6 +59,7 @@ /packages/permission-controller @MetaMask/wallet-api-platform-engineers @MetaMask/wallet-framework-engineers @MetaMask/snaps-devs /packages/permission-log-controller @MetaMask/wallet-api-platform-engineers @MetaMask/wallet-framework-engineers /packages/selected-network-controller @MetaMask/wallet-api-platform-engineers @MetaMask/wallet-framework-engineers @MetaMask/metamask-assets +/packages/profile-sync-controller @MetaMask/notifications @MetaMask/identity ## Package Release related /packages/accounts-controller/package.json @MetaMask/accounts-engineers @MetaMask/wallet-framework-engineers @@ -92,8 +92,8 @@ /packages/notification-services-controller/CHANGELOG.md @MetaMask/notifications @MetaMask/wallet-framework-engineers /packages/phishing-controller/package.json @MetaMask/product-safety @MetaMask/wallet-framework-engineers /packages/phishing-controller/CHANGELOG.md @MetaMask/product-safety @MetaMask/wallet-framework-engineers -/packages/profile-sync-controller/package.json @MetaMask/notifications @MetaMask/wallet-framework-engineers -/packages/profile-sync-controller/CHANGELOG.md @MetaMask/notifications @MetaMask/wallet-framework-engineers +/packages/profile-sync-controller/package.json @MetaMask/notifications @MetaMask/identity @MetaMask/wallet-framework-engineers +/packages/profile-sync-controller/CHANGELOG.md @MetaMask/notifications @MetaMask/identity @MetaMask/wallet-framework-engineers /packages/queued-request-controller/package.json @MetaMask/wallet-api-platform-engineers @MetaMask/wallet-framework-engineers /packages/queued-request-controller/CHANGELOG.md @MetaMask/wallet-api-platform-engineers @MetaMask/wallet-framework-engineers /packages/signature-controller/package.json @MetaMask/confirmations @MetaMask/wallet-framework-engineers diff --git a/README.md b/README.md index 91009bca92..d015ce114c 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,21 @@ This monorepo is a collection of packages used across multiple MetaMask clients (e.g. [`metamask-extension`](https://github.com/MetaMask/metamask-extension/), [`metamask-mobile`](https://github.com/MetaMask/metamask-mobile/)). -## Modules +## Contributing -This repository contains the following packages [^fn1]: +See the [Contributor Guide](./docs/contributing.md) for help on: + +- Setting up your development environment +- Working with the monorepo +- Testing changes in clients +- Issuing new releases +- Creating a new package + +## Installation/Usage + +Each package in this repository has its own README where you can find installation and usage instructions. See `packages/` for more. 
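As a quick, hypothetical illustration of the kind of usage those per-package READMEs describe, the sketch below consumes one published package, `@metamask/base-controller` (installed with `yarn add @metamask/base-controller` or the npm equivalent). The `Sample:ping` action, its handler, and the `ExampleConsumer` name are invented for the example; real controller packages export their own action and event types.

```typescript
// Hypothetical usage sketch for the published @metamask/base-controller package.
// The "Sample:ping" action and its handler are invented for this example;
// real controller packages export their own action and event types.
import { ControllerMessenger } from '@metamask/base-controller';

type SamplePingAction = { type: 'Sample:ping'; handler: () => string };

const globalMessenger = new ControllerMessenger<SamplePingAction, never>();
globalMessenger.registerActionHandler('Sample:ping', () => 'pong');

// Controllers are handed a restricted messenger that only exposes what they need.
const restrictedMessenger = globalMessenger.getRestricted({
  name: 'ExampleConsumer',
  allowedActions: ['Sample:ping'],
  allowedEvents: [],
});

console.log(restrictedMessenger.call('Sample:ping')); // "pong"
```

This messenger pattern is the same one the controller packages in `packages/` are built around, which is why it also appears throughout the tests later in this diff.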
+ +## Packages @@ -46,8 +58,6 @@ This repository contains the following packages [^fn1]: -Or, in graph form [^fn1]: - ```mermaid @@ -122,6 +132,9 @@ linkStyle default opacity:0.5 logging_controller --> controller_utils; message_manager --> base_controller; message_manager --> controller_utils; + multichain --> controller_utils; + multichain --> network_controller; + multichain --> permission_controller; name_controller --> base_controller; name_controller --> controller_utils; network_controller --> base_controller; @@ -185,10 +198,4 @@ linkStyle default opacity:0.5 -Refer to individual packages for usage instructions. - -## Learn more - -For instructions on performing common development-related tasks, see [contributing to the monorepo](./docs/contributing.md). - -[^fn1]: The package list and dependency graph should be programmatically generated by running `yarn update-readme-content`. +(This section may be regenerated at any time by running `yarn update-readme-content`.) diff --git a/docs/contributing.md b/docs/contributing.md index 6539cd1a2d..43b9230d95 100644 --- a/docs/contributing.md +++ b/docs/contributing.md @@ -1,15 +1,40 @@ -# Contributing to the monorepo +# Contributor Guide -## Getting started +## Table of contents -- Install the current LTS version of [Node.js](https://nodejs.org) - - If you are using [nvm](https://github.com/creationix/nvm#installation) (recommended) running `nvm install` will install the latest version and running `nvm use` will automatically choose the right node version for you. -- Install [Yarn](https://yarnpkg.com) via [Corepack](https://github.com/nodejs/corepack?tab=readme-ov-file#how-to-install) - - If you have Yarn installed globally via Homebrew or NPM, you'll need to uninstall it before enabling it via Corepack. -- Run `yarn install` to install dependencies and run any required post-install scripts. -- Run `yarn simple-git-hooks` to add a [Git hook](https://github.com/toplenboren/simple-git-hooks#what-is-a-git-hook) to your local development environment which will ensure that all files pass linting before you push a branch. +- [Setting up your development environment](#setting-up-your-development-environment) +- [Understanding codeowners](#understanding-codeowners) +- [Writing and running tests](#writing-and-running-tests) +- [Linting](#linting) +- [Building](#building) +- [Creating pull requests](#creating-pull-requests) +- [Testing changes to packages in another project](#testing-changes-to-packages-in-another-project) +- [Releasing changes](#releasing-changes) +- [Performing operations across the monorepo](#performing-operations-across-the-monorepo) +- [Adding new packages to the monorepo](#adding-new-packages-to-the-monorepo) -## Testing +## Setting up your development environment + +1. Install the current LTS version of [Node](https://nodejs.org). + - If you are using [NVM](https://github.com/creationix/nvm#installation) (recommended), running `nvm install` will install the latest version, and running `nvm use` will automatically choose the right Node version for you. +2. Run `corepack enable` to install [Yarn](https://yarnpkg.com) via [Corepack](https://github.com/nodejs/corepack?tab=readme-ov-file#how-to-install). + - If you have Yarn installed globally via Homebrew or NPM, you'll need to uninstall it before running this command. +3. Run `yarn install` to install dependencies and run any required post-install scripts. +4. 
Run `yarn simple-git-hooks` to add a [Git hook](https://github.com/toplenboren/simple-git-hooks#what-is-a-git-hook) to your local development environment which will ensure that all files pass linting before you push a branch. + +## Understanding codeowners + +Although maintenance of this repository is superintended by the Wallet Framework team, the responsibility of maintenance is expected to be shared among multiple teams at MetaMask. In fact, some teams have codeownership over specific packages. The exact allocation is governed by the [`CODEOWNERS`](../.github/CODEOWNERS) file. + +**If your team is listed as a codeowner for a package, you may make changes to it, approve pull requests that touch it, and create releases for it without consulting the Wallet Framework team.** Alternatively, if you feel that your team should be granted codeownership over a specific package, you can submit a pull request to change `CODEOWNERS`. + +## Writing and running tests + +[Jest](https://jestjs.io/) is used to ensure that code is working as expected. Ideally, all packages should have 100% test coverage. + +Please follow the [MetaMask unit testing guidelines](https://github.com/MetaMask/contributor-docs/blob/main/docs/testing/unit-testing.md) when writing tests. + +If you need to customize the behavior of Jest for a package, see `jest.config.js` within that package. - Run `yarn workspace <workspaceName> run test` to run all tests for a package. - Run `yarn workspace <workspaceName> run jest --no-coverage <file>` to run a test file within the context of a package.
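To make the conventions above concrete, here is a minimal, hypothetical Jest test written in the style used throughout this repo (TypeScript, `describe`/`it`/`expect`). The `add` helper is invented for the example and is not part of any package.

```typescript
// Hypothetical example of a Jest unit test in the style used across this repo.
// The `add` helper is invented purely for illustration.
const add = (a: number, b: number): number => a + b;

describe('add', () => {
  it('adds two numbers', () => {
    expect(add(1, 2)).toBe(3);
  });

  it('is commutative', () => {
    expect(add(2, 1)).toBe(add(1, 2));
  });
});
```

Real tests in this repo go considerably further (fake timers, messenger mocks, and so on, as the controller tests later in this diff show), but the basic structure is the same.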
@@ -21,21 +46,21 @@ ## Linting -Run `yarn lint` to lint all files and show possible violations. +[ESLint](https://eslint.org/docs/v8.x/) v8 (via [MetaMask's shared ESLint configurations](https://github.com/MetaMask/eslint-config)) is used to check for code quality issues, and [Prettier](https://prettier.io/docs/en/) is used to format files. -Run `yarn lint:fix` to fix any automatically fixable violations. +If you need to customize the behavior of ESLint, see `.eslintrc.js` in the root. -## Performing operations across the monorepo +- Run `yarn lint` to lint all files and show possible violations across the monorepo. +- Run `yarn lint:fix` to fix any automatically fixable violations. -This repository relies on Yarn's [workspaces feature](https://yarnpkg.com/features/workspaces) to provide a way to work with packages individually and collectively. Refer to the documentation for the following Yarn commands for usage instructions: ## Building -- [`yarn workspace`](https://yarnpkg.com/cli/workspace) -- [`yarn workspaces foreach`](https://yarnpkg.com/cli/workspaces/foreach) [`ts-bridge`](https://github.com/ts-bridge/ts-bridge) is used to publish packages in both CommonJS- and ESM-compatible formats. -> **Note** -> -> - `workspaceName` in the Yarn documentation is the `name` field within a package's `package.json`, e.g., `@metamask/address-book-controller`, not the directory where it is located, e.g., `packages/address-book-controller`. -> - `commandName` in the Yarn documentation is any sub-command that the `yarn` executable would usually take. Pay special attention to the difference between `run` vs `exec`. If you want to run a package script, you would use `run`, e.g., `yarn workspace @metamask/address-book-controller run changelog:validate`; but if you want to run _any_ shell command, you'd use `exec`, e.g. `yarn workspace @metamask/address-book-controller exec cat package.json | jq '.version'`. +Built files show up in the `dist/` directory in each package. These are the files which will ultimately be published to NPM. + +- Run `yarn build` to build all packages in the monorepo. +- Run `yarn workspace <workspaceName> run build` to build a single package. ## Creating pull requests @@ -148,120 +173,116 @@ To use a preview build for a package within a project, you need to override the 4. Run `yarn install`. -## Adding new packages +## Releasing changes -> If you're migrating an existing package to the monorepo, please see [the package migration documentation](./package-migration-process-guide.md). -> You may be able to make use of `create-package` when migrating your package, but there's a lot more to it. Have changes that you need to release? There are a few things to understand: -Manually creating a new monorepo package can be a tedious, even frustrating process. To alleviate that -problem, we have created a CLI that automates most of the job for us, creatively titled -[`create-package`](../scripts/create-package/). To create a new monorepo package, follow these steps: +- The responsibility of maintenance is not the only thing shared among multiple teams at MetaMask; releases are as well. That means **if you work on a team that has codeownership over a package, you are free to create a new release without needing the Wallet Framework team to do so.** +- Unlike clients, releases are not issued on a schedule; **anyone may create a release at any time**. Because of this, you may wish to review the Pull Requests tab on GitHub and ensure that no one else has a release candidate already in progress. If not, then you are free to start the process. +- The release process is a work in progress. Further improvements to simplify the process are planned, but in the meantime, if you encounter any issues, please reach out to the Wallet Framework team. -1. Create a new package using `yarn create-package`. - - Use the `--help` flag for usage information. - - Once this is done, you can find a package with your chosen name in `/packages`. -2. Make sure your license is correct. - - By default, `create-package` gives your package an MIT license. - - If your desired license is _not_ MIT, then you must update your `LICENSE` file and the - `license` field of `package.json`. -3. Add your dependencies. - - Do this as normal using `yarn`. - - Remember, if you are adding other monorepo packages as dependents, don't forget to add them - to the `references` array in your package's `tsconfig.json` and `tsconfig.build.json`. +Now for the process itself: -And that's it! +1. **Start by creating the release branch.** -### Contributing to `create-package` On the `main` branch, run `yarn create-release-branch`. This command creates a branch named `release/<version>` which will represent the new release. -Along with this documentation, `create-package` is intended to be the source of truth for the -process of adding new packages to the monorepo. Consequently, to change that process, you will want -to change `create-package`. +2. **Specify packages to release along with their versions.** -The `create-package` directory contains a [template package](../scripts/create-package/package-template/). The CLI is not aware of the contents of the template, only that its files have -[placeholder values](../scripts/create-package/constants.ts). When a new package is created, the template files are read from disk, the -placeholder values are replaced with real ones, and the updated files are added to a new directory -in `/packages`.
To modify the template package: + Unless you've made a lot of breaking changes, you probably don't want to publish a new version of every single package in this repo. Fortunately, you can choose a subset of packages to include in the next release. You do this by modifying a YAML file called a "release spec", which the tool has generated and opened in your editor. Follow the instructions at the top of the file for more information. -- If you need to add or modify any files or folders, just go ahead and make your changes in - [`/scripts/create-package/package-template`](../scripts/create-package/package-template/). - The CLI will read whatever's in that directory and write it to disk. -- If you need to add or modify any placeholders, make sure that your desired values are added to - both the relevant file(s) and - [`/scripts/create-package/constants.ts`](../scripts/create-package/constants.ts). - Then, update the implementation of the CLI accordingly. -- As with placeholders, updating the monorepo files that the CLI interacts with begins by updating - [`/scripts/create-package/constants.ts`](../scripts/create-package/constants.ts). + In addition to selecting a list of packages, you'll also want to tell the tool which new versions they ought to receive. Since you'll want to follow SemVer, how you bump a package depends on the nature of the changes. You can understand these changes better by opening the changelog for each package in your editor. -## Releasing + Once you save and close the release spec, the tool will proceed. -The [`create-release-branch`](https://github.com/MetaMask/create-release-branch) tool and [`action-publish-release`](https://github.com/MetaMask/action-publish-release) GitHub action are used to automate the release process. +3. **Include more packages as necessary.** -1. **Initiate the release branch and specify packages to be released.** + Some packages in the monorepo have dependencies on other packages elsewhere in the monorepo. To ensure that clients are able to upgrade without receiving compile-time or runtime errors, you may need to include some of these dependencies in your release. If the tool thinks that there are some packages you've left out, it will pause and let you know what they are. - 1. **Create the release branch.** + To address the errors, you'll need to copy the path to the YAML file, reopen it in your editor, and include the packages it mentions. You also have the option to skip any packages you think aren't an issue, but make sure you've checked that they really are unaffected. (If you have any questions about this step, let the Wallet Framework team know.) - Start by running `yarn create-release-branch`. This command creates a branch named `release/` which will represent the new release. + Once you've made the requisite changes to the YAML file, save it and re-run `yarn create-release-branch`. You may need to repeat this step multiple times until you don't see any more errors. - 2. **Specify packages to release along with their versions.** +4. **Review and update changelogs for relevant packages.** - At this point, you need to tell the tool which packages you want to include in the next release and which versions to assign to those packages. You do this by modifying a YAML file called a "release spec", which the tool has generated and opened it in your editor. Follow the instructions at the top of the file to proceed. + Once the tool proceeds without issue, you will be on the new release branch.
In addition, each package you intend to release has been updated in two ways: - To assist you, the tool has also updated all of the packages that have been changed since their previous releases so that their changelogs now reflect those new changes. This should help you to understand what will be released and how to bump the versions. + - The version in `package.json` has been bumped. + - A new section has been added at the top of `CHANGELOG` for the new version. - Once you save and close the release spec, the tool will proceed. + At this point, you need to review the changelog entries and ensure that they are helpful for consumers: -2. **update all packages dependencies to their latest version** + - Categorize entries appropriately following the ["Keep a Changelog"](https://keepachangelog.com/en/1.0.0/) guidelines. Ensure that no changes are listed under "Uncategorized". + - Remove changelog entries that don't affect consumers of the package (e.g. lockfile changes or development environment changes). Exceptions may be made for changes that might be of interest despite not having an effect upon the published package (e.g. major test improvements, security improvements, improved documentation, etc.). + - Reword changelog entries to explain changes in terms that users of the package will understand (e.g., avoid referencing internal variables/concepts). + - Consolidate related changes into single entries where appropriate. - Run `yarn constraints --fix && yarn && yarn dedupe`. + Make sure to run `yarn changelog:validate` once you're done to ensure all changelogs are correctly formatted. -3. **Review and update changelogs for relevant packages.** +5. **Push and submit a pull request for the release branch so that it can be reviewed and tested.** - 1. At this point, the versions of all packages you intend to release have been bumped and their changelogs list new changes. Now you need to go through each changelog and make sure that they follow existing standards: + Release PRs can be approved by codeowners of affected packages, so as long as the above guidelines have been followed, there is no need to reach out to the Wallet Framework team for approval. - - Categorize entries appropriately following the ["Keep a Changelog"](https://keepachangelog.com/en/1.0.0/) guidelines. - - Remove changelog entries that don't affect consumers of the package (e.g. lockfile changes or development environment changes). Exceptions may be made for changes that might be of interest despite not having an effect upon the published package (e.g. major test improvements, security improvements, improved documentation, etc.). - - Reword changelog entries to explain changes in terms that users of the package will understand (e.g., avoid referencing internal variables/concepts). - - Consolidate related changes into single entries where appropriate. +6. **Incorporate new changes made to `main` into changelogs.** - 2. Run `yarn changelog:validate` to ensure all changelogs are correctly formatted. + If at any point you see the "Update branch" button on your release PR, stop and look over the most recent commits made to `main`. If there are new changes to package you are trying to release, make sure that the changes are reflected in the changelog for that package. -4. **Push and submit a pull request for the release branch so that it can be reviewed and tested.** +7. **"Squash & Merge" the release and wait for approval.** - Make sure the title of the pull request follows the pattern "Release \". + You're almost there! 
- If changes are made to the base branch, the release branch will need to be updated with these changes and review/QA will need to restart again. As such, it's probably best to avoid merging other PRs into the base branch while review is underway. + Merging triggers the [`publish-release` GitHub action](https://github.com/MetaMask/action-publish-release) workflow to tag the final release commit and publish the release on GitHub. Before packages are published to NPM, this action will automatically notify the [`npm-publishers`](https://github.com/orgs/MetaMask/teams/npm-publishers) team in Slack to review and approve the release. -5. **"Squash & Merge" the release.** +8. **Verify that the new versions have been published.** - This step triggers the [`publish-release` GitHub action](https://github.com/MetaMask/action-publish-release) workflow to tag the final release commit and publish the release on GitHub. Once the `npm-publishers` team has approved the release, you can click on the link in the Slack message to monitor the remainder of the process. - Pay attention to the box you see when you press the green button and ensure that the final name of the commit follows the pattern "Release \". + Once the action has completed, [check NPM](https://npms.io/search?q=scope%3Ametamask) to verify that all relevant packages have been published. -6. **Publish the release on NPM.** + You're done! - The `publish-release` GitHub Action workflow runs the `publish-npm` job, which publishes relevant packages to NPM. It requires approval from the [`npm-publishers`](https://github.com/orgs/MetaMask/teams/npm-publishers) team to complete. If you're not on the team, ask a member to approve it for you; otherwise, approve the job. +## Performing operations across the monorepo - Once the `publish-npm` job has finished, [check NPM](https://npms.io/search?q=scope%3Ametamask) to verify that all relevant packages has been published. This repository relies on Yarn's [workspaces feature](https://yarnpkg.com/features/workspaces) to provide a way to work with packages individually and collectively. Refer to the documentation for the following Yarn commands for usage instructions: -### Handling common errors +- [`yarn workspace`](https://yarnpkg.com/cli/workspace) +- [`yarn workspaces foreach`](https://yarnpkg.com/cli/workspaces/foreach) -If an error occurs, re-edit the release spec and rerun `yarn create-release-branch`. Common errors include: > **Note** > > - `workspaceName` in the Yarn documentation is the `name` field within a package's `package.json`, e.g., `@metamask/address-book-controller`, not the directory where it is located, e.g., `packages/address-book-controller`. > - `commandName` in the Yarn documentation is any sub-command that the `yarn` executable would usually take. Pay special attention to the difference between `run` vs `exec`. If you want to run a package script, you would use `run`, e.g., `yarn workspace @metamask/address-book-controller run changelog:validate`; but if you want to run _any_ shell command, you'd use `exec`, e.g. `yarn workspace @metamask/address-book-controller exec cat package.json | jq '.version'`. -- **Invalid Version Specifier:** ## Adding new packages to the monorepo - - Error: `* Line 14: "invalid_version" is not a valid version specifier...` - Resolution: Use "major", "minor", "patch", or a specific version number like "1.2.3".
+> [!NOTE] +> If you're migrating an existing package to the monorepo, please see [the package migration documentation](./package-migration-process-guide.md). +> You may be able to make use of `create-package` when migrating your package, but there's a lot more to it. -- **Version Less than or Equal to Current:** +Manually creating a new monorepo package can be a tedious, even frustrating process. To alleviate that +problem, we have created a CLI that automates most of the job for us, creatively titled +[`create-package`](../scripts/create-package/). To create a new monorepo package, follow these steps: - - Error: `* Line 14: "1.2.3" is not a valid version specifier...` - - Resolution: Specify a version greater than the current version of the package. +1. Create a new package using `yarn create-package`. + - Use the `--help` flag for usage information. + - Once this is done, you can find a package with your chosen name in `/packages`. +2. Make sure your license is correct. + - By default, `create-package` gives your package an MIT license. + - If your desired license is _not_ MIT, then you must update your `LICENSE` file and the + `license` field of `package.json`. +3. Add your dependencies. + - Do this as normal using `yarn`. + - Remember, if you are adding other monorepo packages as dependents, don't forget to add them + to the `references` array in your package's `tsconfig.json` and `tsconfig.build.json`. + +And that's it! + +### Contributing to `create-package` -- **Releasing Packages with Breaking Changes:** +Along with this documentation, `create-package` is intended to be the source of truth for the process of adding new packages to the monorepo. Consequently, to change that process, you will want to change `create-package`. - - Error: `* The following dependents of package '@metamask/a'...` - - Resolution: Include dependent packages in the release or use "intentionally-skip" if certain they are unaffected. +The `create-package` directory contains a [template package](../scripts/create-package/package-template/). The CLI is not aware of the contents of the template, only that its files have [placeholder values](../scripts/create-package/constants.ts). When a new package is created, the template files are read from disk, the placeholder values are replaced with real ones, and the updated files are added to a new directory in `/packages`. To modify the template package: -- **Dependencies/Peer Dependencies Missing:** - - Error: `* The following packages, which are dependencies...` - - Resolution: Include necessary dependencies or peer dependencies in the release or use "intentionally-skip" if certain they are unaffected. +- If you need to add or modify any files or folders, just go ahead and make your changes in [`/scripts/create-package/package-template`](../scripts/create-package/package-template/). The CLI will read whatever's in that directory and write it to disk. +- If you need to add or modify any placeholders, make sure that your desired values are added to both the relevant file(s) and [`/scripts/create-package/constants.ts`](../scripts/create-package/constants.ts). Then, update the implementation of the CLI accordingly. +- As with placeholders, updating the monorepo files that the CLI interacts with begins by updating [`/scripts/create-package/constants.ts`](../scripts/create-package/constants.ts). 
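To make the template-and-placeholder flow described above a little more concrete, here is a simplified, hypothetical sketch of the idea. It is not the actual `create-package` implementation, and the placeholder names and values are invented for illustration; the real ones live in [`/scripts/create-package/constants.ts`](../scripts/create-package/constants.ts).

```typescript
// Simplified, hypothetical sketch of the template/placeholder flow described above.
// It is NOT the real `create-package` CLI; the placeholder names are invented.
import * as fs from 'fs/promises';
import * as path from 'path';

// Invented placeholder map; the real values are defined in
// `/scripts/create-package/constants.ts` and filled in from CLI arguments.
const placeholderValues: Record<string, string> = {
  PACKAGE_NAME: '@metamask/example-controller',
  PACKAGE_DESCRIPTION: 'An example controller',
  PACKAGE_DIRECTORY_NAME: 'example-controller',
  CURRENT_YEAR: new Date().getFullYear().toString(),
};

async function instantiateTemplate(
  templateDir: string,
  targetDir: string,
): Promise<void> {
  await fs.mkdir(targetDir, { recursive: true });
  for (const entry of await fs.readdir(templateDir, { withFileTypes: true })) {
    const source = path.join(templateDir, entry.name);
    const target = path.join(targetDir, entry.name);
    if (entry.isDirectory()) {
      // Recurse into sub-directories of the template.
      await instantiateTemplate(source, target);
    } else {
      // Read each template file and swap every placeholder token for its value.
      let contents = await fs.readFile(source, 'utf8');
      for (const [token, value] of Object.entries(placeholderValues)) {
        contents = contents.split(token).join(value);
      }
      await fs.writeFile(target, contents);
    }
  }
}

// e.g. instantiateTemplate('scripts/create-package/package-template', 'packages/example-controller');
```

In practice you never run anything like this by hand, since `yarn create-package` does it for you, but it shows why adding a new placeholder means updating both the template files and `constants.ts`.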
diff --git a/package.json b/package.json index 0779428d8c..e2e0fa10d2 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/core-monorepo", - "version": "245.0.0", + "version": "257.0.0", "private": true, "description": "Monorepo for packages shared between MetaMask clients", "repository": { diff --git a/packages/accounts-controller/CHANGELOG.md b/packages/accounts-controller/CHANGELOG.md index ce886e69d2..01002430f4 100644 --- a/packages/accounts-controller/CHANGELOG.md +++ b/packages/accounts-controller/CHANGELOG.md @@ -7,6 +7,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [20.0.0] + +### Changed + +- **BREAKING:** Bump peer dependency `@metamask/keyring-controller` from `^18.0.0` to `^19.0.0` ([#4915](https://github.com/MetaMask/core/pull/4956)) +- **BREAKING:** Bump `@metamask/keyring-api` from `^8.1.3` to `^10.1.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) + - If you are depending on `@metamask/providers` directly, you will need to upgrade to `18.1.0`. +- Bump `@metamask/eth-snap-keyring` from `^4.3.6` to `^5.0.1` ([#4948](https://github.com/MetaMask/core/pull/4948)) +- Bump `@metamask/snaps-utils` from `^4.3.6` to `^8.3.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) +- Bump `@metamask/snaps-sdk` from `^6.5.0` to `^6.7.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) + ## [19.0.0] ### Changed @@ -343,7 +354,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Initial release ([#1637](https://github.com/MetaMask/core/pull/1637)) -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/accounts-controller@19.0.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/accounts-controller@20.0.0...HEAD +[20.0.0]: https://github.com/MetaMask/core/compare/@metamask/accounts-controller@19.0.0...@metamask/accounts-controller@20.0.0 [19.0.0]: https://github.com/MetaMask/core/compare/@metamask/accounts-controller@18.2.3...@metamask/accounts-controller@19.0.0 [18.2.3]: https://github.com/MetaMask/core/compare/@metamask/accounts-controller@18.2.2...@metamask/accounts-controller@18.2.3 [18.2.2]: https://github.com/MetaMask/core/compare/@metamask/accounts-controller@18.2.1...@metamask/accounts-controller@18.2.2 diff --git a/packages/accounts-controller/package.json b/packages/accounts-controller/package.json index ceaa7e999c..d36254ead4 100644 --- a/packages/accounts-controller/package.json +++ b/packages/accounts-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/accounts-controller", - "version": "19.0.0", + "version": "20.0.0", "description": "Manages internal accounts", "keywords": [ "MetaMask", @@ -49,10 +49,10 @@ "dependencies": { "@ethereumjs/util": "^8.1.0", "@metamask/base-controller": "^7.0.2", - "@metamask/eth-snap-keyring": "^4.3.6", - "@metamask/keyring-api": "^8.1.3", - "@metamask/snaps-sdk": "^6.5.0", - "@metamask/snaps-utils": "^8.1.1", + "@metamask/eth-snap-keyring": "^5.0.1", + "@metamask/keyring-api": "^10.1.0", + "@metamask/snaps-sdk": "^6.7.0", + "@metamask/snaps-utils": "^8.3.0", "@metamask/utils": "^10.0.0", "deepmerge": "^4.2.2", "ethereum-cryptography": "^2.1.2", @@ -61,8 +61,8 @@ }, "devDependencies": { "@metamask/auto-changelog": "^3.4.4", - "@metamask/keyring-controller": "^18.0.0", - "@metamask/snaps-controllers": "^9.7.0", + "@metamask/keyring-controller": "^19.0.0", + "@metamask/snaps-controllers": "^9.10.0", "@types/jest": "^27.4.1", "@types/readable-stream": "^2.3.0", "jest": 
"^27.5.1", @@ -72,7 +72,7 @@ "typescript": "~5.2.2" }, "peerDependencies": { - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/snaps-controllers": "^9.7.0" }, "engines": { diff --git a/packages/accounts-controller/src/tests/mocks.ts b/packages/accounts-controller/src/tests/mocks.ts index daebd1fbc3..b41388fb2f 100644 --- a/packages/accounts-controller/src/tests/mocks.ts +++ b/packages/accounts-controller/src/tests/mocks.ts @@ -55,7 +55,7 @@ export const createMockInternalAccount = ({ ]; break; case BtcAccountType.P2wpkh: - methods = [BtcMethod.SendMany]; + methods = [BtcMethod.SendBitcoin]; break; default: throw new Error(`Unknown account type: ${type as string}`); diff --git a/packages/assets-controllers/CHANGELOG.md b/packages/assets-controllers/CHANGELOG.md index 923f49ed9b..b2002d577f 100644 --- a/packages/assets-controllers/CHANGELOG.md +++ b/packages/assets-controllers/CHANGELOG.md @@ -7,6 +7,67 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [45.1.0] + +### Added + +- `chainIdToNativeTokenAddress` to record chains with unique (non-zero) addresses ([#4952](https://github.com/MetaMask/core/pull/4952)) +- `getNativeTokenAddress()` exported function to return the correct native token address for native assets ([#4952](https://github.com/MetaMask/core/pull/4952)) +- add support for all added networks when switching account for Token Detection ([#4957](https://github.com/MetaMask/core/pull/4957)) + +### Changed + +- Update price API calls to use the native token by chain instead of relying on the zero address. ([#4952](https://github.com/MetaMask/core/pull/4952)) +- Update `TokenRatesController` market data mapping to use `getNativeTokenAddress` instead of the zero address for native tokens. ([#4952](https://github.com/MetaMask/core/pull/4952)) + +## [45.0.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-controller` peer dependency from `^18.0.0` to `^19.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) +- **BREAKING:** Bump `@metamask/accounts-controller` peer dependency from `^19.0.0` to `^20.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) +- **BREAKING:** Bump `@metamask/preferences-controller` peer dependency from `^14.0.0` to `^15.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) + +## [44.1.0] + +### Changed + +- An argument `networkClientId` is added to `TokensController.ignoreTokens`, allowing tokens to be ignored on specific chains. ([#4949](https://github.com/MetaMask/core/pull/4949)) + +## [44.0.1] + +### Changed + +- Fixes an issue where the token detection was unnecessarily falling back to an RPC approach, causing redundant detections. ([#4928](https://github.com/MetaMask/core/pull/4928)) + +- Fixes an issue where `TokensController.addTokens` was not respecting the network client id passed to it. ([#4940](https://github.com/MetaMask/core/pull/4940)) + +## [44.0.0] + +### Changed + +- **BREAKING**: The `TokenBalancesController` state is now across all chains and accounts under the field `tokenBalances`, as a mapping from account address -> chain id -> token address -> balance. ([#4782](https://github.com/MetaMask/core/pull/4782)) + +- **BREAKING**: The `TokenBalancesController` now extends `StaticIntervalPollingController`, and the new polling API `startPolling` must be used to initiate polling (`startPolling`, `stopPollingByPollingToken`). 
([#4782](https://github.com/MetaMask/core/pull/4782)) + +- **BREAKING**: `TokenBalancesController` now requires subscriptions to the `PreferencesController:stateChange` and `NetworkController:stateChange` events, as well as access to the `NetworkController:getNetworkClientById`, `NetworkController:getState`, `TokensController:getState`, and `PreferencesController:getState` actions. ([#4782](https://github.com/MetaMask/core/pull/4782)) + +- **BREAKING**: `TokensController` requires a subscription to the `NetworkController:stateChange` event. It now removes state for chain IDs when their network is removed. ([#4782](https://github.com/MetaMask/core/pull/4782)) + +- `TokenRatesController` now removes state for chain IDs when their network is removed. ([#4782](https://github.com/MetaMask/core/pull/4782)) + +## [43.1.1] + +### Changed + +- Fix a bug in `TokensController.addTokens` where tokens could be added from the wrong chain. ([#4924](https://github.com/MetaMask/core/pull/4924)) + +## [43.1.0] + +### Added + +- Add Solana to the polled exchange rates ([#4914](https://github.com/MetaMask/core/pull/4914)) + ## [43.0.0] ### Added @@ -1204,7 +1265,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Use Ethers for AssetsContractController ([#845](https://github.com/MetaMask/core/pull/845)) -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@43.0.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@45.1.0...HEAD +[45.1.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@45.0.0...@metamask/assets-controllers@45.1.0 +[45.0.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@44.1.0...@metamask/assets-controllers@45.0.0 +[44.1.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@44.0.1...@metamask/assets-controllers@44.1.0 +[44.0.1]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@44.0.0...@metamask/assets-controllers@44.0.1 +[44.0.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@43.1.1...@metamask/assets-controllers@44.0.0 +[43.1.1]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@43.1.0...@metamask/assets-controllers@43.1.1 +[43.1.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@43.0.0...@metamask/assets-controllers@43.1.0 [43.0.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@42.0.0...@metamask/assets-controllers@43.0.0 [42.0.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@41.0.0...@metamask/assets-controllers@42.0.0 [41.0.0]: https://github.com/MetaMask/core/compare/@metamask/assets-controllers@40.0.0...@metamask/assets-controllers@41.0.0 diff --git a/packages/assets-controllers/package.json b/packages/assets-controllers/package.json index 61b73e747d..f19034abe0 100644 --- a/packages/assets-controllers/package.json +++ b/packages/assets-controllers/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/assets-controllers", - "version": "43.0.0", + "version": "45.1.0", "description": "Controllers which manage interactions involving ERC-20, ERC-721, and ERC-1155 tokens (including NFTs)", "keywords": [ "MetaMask", @@ -48,6 +48,7 @@ }, "dependencies": { "@ethereumjs/util": "^8.1.0", + "@ethersproject/abi": "^5.7.0", "@ethersproject/address": "^5.7.0", "@ethersproject/bignumber": "^5.7.0", "@ethersproject/contracts": "^5.7.0", @@ -73,14 +74,14 @@ "uuid": "^8.3.2" }, "devDependencies": { -
"@metamask/accounts-controller": "^19.0.0", + "@metamask/accounts-controller": "^20.0.0", "@metamask/approval-controller": "^7.1.1", "@metamask/auto-changelog": "^3.4.4", "@metamask/ethjs-provider-http": "^0.3.0", - "@metamask/keyring-api": "^8.1.3", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-api": "^10.1.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/network-controller": "^22.0.2", - "@metamask/preferences-controller": "^14.0.0", + "@metamask/preferences-controller": "^15.0.0", "@types/jest": "^27.4.1", "@types/lodash": "^4.14.191", "@types/node": "^16.18.54", @@ -95,11 +96,11 @@ "typescript": "~5.2.2" }, "peerDependencies": { - "@metamask/accounts-controller": "^19.0.0", + "@metamask/accounts-controller": "^20.0.0", "@metamask/approval-controller": "^7.0.0", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/network-controller": "^22.0.0", - "@metamask/preferences-controller": "^14.0.0" + "@metamask/preferences-controller": "^15.0.0" }, "engines": { "node": "^18.18 || >=20" diff --git a/packages/assets-controllers/src/RatesController/RatesController.test.ts b/packages/assets-controllers/src/RatesController/RatesController.test.ts index bf9d0fc193..f2fca24f76 100644 --- a/packages/assets-controllers/src/RatesController/RatesController.test.ts +++ b/packages/assets-controllers/src/RatesController/RatesController.test.ts @@ -100,8 +100,14 @@ describe('RatesController', () => { const { fiatCurrency, rates, cryptocurrencies } = ratesController.state; expect(ratesController).toBeDefined(); expect(fiatCurrency).toBe('usd'); - expect(Object.keys(rates)).toStrictEqual([Cryptocurrency.Btc]); - expect(cryptocurrencies).toStrictEqual([Cryptocurrency.Btc]); + expect(Object.keys(rates)).toStrictEqual([ + Cryptocurrency.Btc, + Cryptocurrency.Solana, + ]); + expect(cryptocurrencies).toStrictEqual([ + Cryptocurrency.Btc, + Cryptocurrency.Solana, + ]); }); }); @@ -119,11 +125,16 @@ describe('RatesController', () => { const publishActionSpy = jest.spyOn(messenger, 'publish'); jest.spyOn(global.Date, 'now').mockImplementation(() => getStubbedDate()); - const mockRateValue = 57715.42; + const mockBtcRateValue = 57715.42; + const mockSolRateValue = 200.48; + const fetchExchangeRateStub = jest.fn(() => { return Promise.resolve({ btc: { - eur: mockRateValue, + eur: mockBtcRateValue, + }, + sol: { + eur: mockSolRateValue, }, }); }); @@ -144,6 +155,10 @@ describe('RatesController', () => { conversionDate: 0, conversionRate: 0, }, + sol: { + conversionDate: 0, + conversionRate: 0, + }, }); await ratesController.start(); @@ -163,7 +178,11 @@ describe('RatesController', () => { expect(ratesPosUpdate).toStrictEqual({ btc: { conversionDate: MOCK_TIMESTAMP, - conversionRate: mockRateValue, + conversionRate: mockBtcRateValue, + }, + sol: { + conversionDate: MOCK_TIMESTAMP, + conversionRate: mockSolRateValue, }, }); @@ -326,7 +345,10 @@ describe('RatesController', () => { const cryptocurrencyListPreUpdate = ratesController.getCryptocurrencyList(); - expect(cryptocurrencyListPreUpdate).toStrictEqual([Cryptocurrency.Btc]); + expect(cryptocurrencyListPreUpdate).toStrictEqual([ + Cryptocurrency.Btc, + Cryptocurrency.Solana, + ]); // Just to make sure we're updating to something else than the default list expect(cryptocurrencyListPreUpdate).not.toStrictEqual( mockCryptocurrencyList, diff --git a/packages/assets-controllers/src/RatesController/RatesController.ts b/packages/assets-controllers/src/RatesController/RatesController.ts index 
7abed73eed..16588ef0d0 100644 --- a/packages/assets-controllers/src/RatesController/RatesController.ts +++ b/packages/assets-controllers/src/RatesController/RatesController.ts @@ -12,8 +12,15 @@ import type { export const name = 'RatesController'; +/** + * Supported cryptocurrencies that can be used as a base currency. The value needs to be compatible + * with CryptoCompare's API which is the default source for the rates. + * + * See: https://min-api.cryptocompare.com/documentation?key=Price&cat=multipleSymbolsPriceEndpoint + */ export enum Cryptocurrency { Btc = 'btc', + Solana = 'sol', } const DEFAULT_INTERVAL = 180000; @@ -31,8 +38,12 @@ const defaultState = { conversionDate: 0, conversionRate: 0, }, + [Cryptocurrency.Solana]: { + conversionDate: 0, + conversionRate: 0, + }, }, - cryptocurrencies: [Cryptocurrency.Btc], + cryptocurrencies: [Cryptocurrency.Btc, Cryptocurrency.Solana], }; export class RatesController extends BaseController< diff --git a/packages/assets-controllers/src/TokenBalancesController.test.ts b/packages/assets-controllers/src/TokenBalancesController.test.ts index b2099d05c1..0be4e66116 100644 --- a/packages/assets-controllers/src/TokenBalancesController.test.ts +++ b/packages/assets-controllers/src/TokenBalancesController.test.ts @@ -1,480 +1,471 @@ import { ControllerMessenger } from '@metamask/base-controller'; import { toHex } from '@metamask/controller-utils'; -import type { InternalAccount } from '@metamask/keyring-api'; +import type { NetworkState } from '@metamask/network-controller'; +import type { PreferencesState } from '@metamask/preferences-controller'; import BN from 'bn.js'; +import { useFakeTimers } from 'sinon'; -import { flushPromises } from '../../../tests/helpers'; -import { createMockInternalAccount } from '../../accounts-controller/src/tests/mocks'; +import { advanceTime } from '../../../tests/helpers'; +import * as multicall from './multicall'; import type { AllowedActions, AllowedEvents, TokenBalancesControllerActions, TokenBalancesControllerEvents, - TokenBalancesControllerMessenger, TokenBalancesControllerState, } from './TokenBalancesController'; import { TokenBalancesController } from './TokenBalancesController'; -import type { Token } from './TokenRatesController'; -import { - getDefaultTokensState, - type TokensControllerState, -} from './TokensController'; - -const controllerName = 'TokenBalancesController'; - -/** - * Constructs a restricted controller messenger. - * - * @param controllerMessenger - The controller messenger to restrict. - * @returns A restricted controller messenger. 
- */ -function getMessenger( - controllerMessenger = new ControllerMessenger< - TokenBalancesControllerActions | AllowedActions, - TokenBalancesControllerEvents | AllowedEvents - >(), -): TokenBalancesControllerMessenger { - return controllerMessenger.getRestricted({ - name: controllerName, - allowedActions: [ - 'AccountsController:getSelectedAccount', - 'AssetsContractController:getERC20BalanceOf', - ], - allowedEvents: ['TokensController:stateChange'], - }); -} +import type { TokensControllerState } from './TokensController'; const setupController = ({ config, - mock, + tokens = { allTokens: {}, allDetectedTokens: {} }, }: { config?: Partial[0]>; - mock: { - getBalanceOf?: BN; - selectedAccount: InternalAccount; - }; -}): { - controller: TokenBalancesController; - messenger: TokenBalancesControllerMessenger; - mockSelectedAccount: jest.Mock; - mockGetERC20BalanceOf: jest.Mock; - triggerTokensStateChange: (state: TokensControllerState) => Promise; -} => { - const controllerMessenger = new ControllerMessenger< + tokens?: Partial; +} = {}) => { + const messenger = new ControllerMessenger< TokenBalancesControllerActions | AllowedActions, TokenBalancesControllerEvents | AllowedEvents >(); - const messenger = getMessenger(controllerMessenger); - const mockSelectedAccount = jest.fn().mockReturnValue(mock.selectedAccount); - const mockGetERC20BalanceOf = jest.fn().mockReturnValue(mock.getBalanceOf); + const tokenBalancesMessenger = messenger.getRestricted({ + name: 'TokenBalancesController', + allowedActions: [ + 'NetworkController:getState', + 'NetworkController:getNetworkClientById', + 'PreferencesController:getState', + 'TokensController:getState', + 'AccountsController:getSelectedAccount', + ], + allowedEvents: [ + 'NetworkController:stateChange', + 'PreferencesController:stateChange', + 'TokensController:stateChange', + ], + }); - controllerMessenger.registerActionHandler( - 'AccountsController:getSelectedAccount', - mockSelectedAccount, + messenger.registerActionHandler( + 'NetworkController:getState', + jest.fn().mockImplementation(() => ({ + networkConfigurationsByChainId: { + '0x1': { + defaultRpcEndpointIndex: 0, + rpcEndpoints: [{}], + }, + }, + })), + ); + + messenger.registerActionHandler( + 'PreferencesController:getState', + jest.fn().mockImplementation(() => ({})), ); - controllerMessenger.registerActionHandler( - 'AssetsContractController:getERC20BalanceOf', - mockGetERC20BalanceOf, + + messenger.registerActionHandler( + 'TokensController:getState', + jest.fn().mockImplementation(() => tokens), ); - const controller = new TokenBalancesController({ - messenger, - ...config, - }); + messenger.registerActionHandler( + 'AccountsController:getSelectedAccount', + jest.fn().mockImplementation(() => ({ + address: '0x0000000000000000000000000000000000000000', + })), + ); - const triggerTokensStateChange = async (state: TokensControllerState) => { - controllerMessenger.publish('TokensController:stateChange', state, []); - }; + messenger.registerActionHandler( + 'NetworkController:getNetworkClientById', + jest.fn().mockReturnValue({ provider: jest.fn() }), + ); return { - controller, + controller: new TokenBalancesController({ + messenger: tokenBalancesMessenger, + ...config, + }), messenger, - mockSelectedAccount, - mockGetERC20BalanceOf, - triggerTokensStateChange, }; }; describe('TokenBalancesController', () => { + let clock: sinon.SinonFakeTimers; + beforeEach(() => { - jest.useFakeTimers(); + clock = useFakeTimers(); }); afterEach(() => { - jest.useRealTimers(); + clock.restore(); }); 
it('should set default state', () => { - const { controller } = setupController({ - mock: { - selectedAccount: createMockInternalAccount({ address: '0x1234' }), - }, - }); - - expect(controller.state).toStrictEqual({ contractBalances: {} }); + const { controller } = setupController(); + expect(controller.state).toStrictEqual({ tokenBalances: {} }); }); it('should poll and update balances in the right interval', async () => { - const updateBalancesSpy = jest.spyOn( + const pollSpy = jest.spyOn( TokenBalancesController.prototype, - 'updateBalances', + '_executePoll', ); - new TokenBalancesController({ - interval: 10, - messenger: getMessenger(new ControllerMessenger()), - }); - await flushPromises(); + const interval = 10; + const { controller } = setupController({ config: { interval } }); - expect(updateBalancesSpy).toHaveBeenCalled(); - expect(updateBalancesSpy).not.toHaveBeenCalledTimes(2); + controller.startPolling({ chainId: '0x1' }); - jest.advanceTimersByTime(15); + await advanceTime({ clock, duration: 1 }); + expect(pollSpy).toHaveBeenCalled(); + expect(pollSpy).not.toHaveBeenCalledTimes(2); - expect(updateBalancesSpy).toHaveBeenCalledTimes(2); + await advanceTime({ clock, duration: interval * 1.5 }); + expect(pollSpy).toHaveBeenCalledTimes(2); }); - it('should update balances if enabled', async () => { - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const { controller } = setupController({ - config: { - disabled: false, - tokens: [{ address, decimals: 18, symbol: 'EOS', aggregators: [] }], - interval: 10, - }, - mock: { - getBalanceOf: new BN(1), - selectedAccount: createMockInternalAccount({ address: '0x1234' }), + it('should update balances on poll', async () => { + const chainId = '0x1'; + const accountAddress = '0x0000000000000000000000000000000000000000'; + const tokenAddress = '0x0000000000000000000000000000000000000001'; + + const tokens = { + allDetectedTokens: {}, + allTokens: { + [chainId]: { + [accountAddress]: [ + { address: tokenAddress, symbol: 's', decimals: 0 }, + ], + }, }, - }); + }; - await controller.updateBalances(); + const { controller } = setupController({ tokens }); + expect(controller.state.tokenBalances).toStrictEqual({}); - expect(controller.state.contractBalances).toStrictEqual({ - '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), - }); - }); - - it('should not update balances if disabled', async () => { - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const { controller } = setupController({ - config: { - disabled: true, - tokens: [{ address, decimals: 18, symbol: 'EOS', aggregators: [] }], - interval: 10, - }, - mock: { - selectedAccount: createMockInternalAccount({ address: '0x1234' }), - getBalanceOf: new BN(1), + const balance = 123456; + jest.spyOn(multicall, 'multicallOrFallback').mockResolvedValue([ + { + success: true, + value: new BN(balance), }, - }); + ]); - await controller.updateBalances(); + await controller._executePoll({ chainId }); - expect(controller.state.contractBalances).toStrictEqual({}); + expect(controller.state.tokenBalances).toStrictEqual({ + [accountAddress]: { + [chainId]: { + [tokenAddress]: toHex(balance), + }, + }, + }); }); - it('should update balances if controller is manually enabled', async () => { - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const { controller } = setupController({ - config: { - disabled: true, - tokens: [{ address, decimals: 18, symbol: 'EOS', aggregators: [] }], - interval: 10, - }, - mock: { - selectedAccount: 
createMockInternalAccount({ address: '0x1234' }), - getBalanceOf: new BN(1), + it('should update balances when they change', async () => { + const chainId = '0x1'; + const accountAddress = '0x0000000000000000000000000000000000000000'; + const tokenAddress = '0x0000000000000000000000000000000000000001'; + + const tokens = { + allDetectedTokens: {}, + allTokens: { + [chainId]: { + [accountAddress]: [ + { address: tokenAddress, symbol: 's', decimals: 0 }, + ], + }, }, - }); + }; - await controller.updateBalances(); + const { controller } = setupController({ tokens }); + expect(controller.state.tokenBalances).toStrictEqual({}); - expect(controller.state.contractBalances).toStrictEqual({}); + for (let balance = 0; balance < 10; balance++) { + jest.spyOn(multicall, 'multicallOrFallback').mockResolvedValue([ + { + success: true, + value: new BN(balance), + }, + ]); - controller.enable(); - await controller.updateBalances(); + await controller._executePoll({ chainId }); - expect(controller.state.contractBalances).toStrictEqual({ - '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), - }); + expect(controller.state.tokenBalances).toStrictEqual({ + [accountAddress]: { + [chainId]: { + [tokenAddress]: toHex(balance), + }, + }, + }); + } }); - it('should not update balances if controller is manually disabled', async () => { - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const { controller } = setupController({ - config: { - disabled: false, - tokens: [{ address, decimals: 18, symbol: 'EOS', aggregators: [] }], - interval: 10, - }, - mock: { - selectedAccount: createMockInternalAccount({ address: '0x1234' }), - getBalanceOf: new BN(1), - }, - }); + it('updates balances when tokens are added', async () => { + const chainId = '0x1'; + const { controller, messenger } = setupController(); - await controller.updateBalances(); + // No tokens initially + await controller._executePoll({ chainId }); + expect(controller.state.tokenBalances).toStrictEqual({}); - expect(controller.state.contractBalances).toStrictEqual({ - '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), - }); + const balance = 123456; + jest.spyOn(multicall, 'multicallOrFallback').mockResolvedValue([ + { + success: true, + value: new BN(balance), + }, + ]); - controller.disable(); - await controller.updateBalances(); + // Publish an update with a token + const accountAddress = '0x0000000000000000000000000000000000000000'; + const tokenAddress = '0x0000000000000000000000000000000000000001'; - expect(controller.state.contractBalances).toStrictEqual({ - '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), - }); - }); - - it('should update balances if tokens change and controller is manually enabled', async () => { - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const { controller, triggerTokensStateChange } = setupController({ - config: { - disabled: true, - tokens: [{ address, decimals: 18, symbol: 'EOS', aggregators: [] }], - interval: 10, - }, - mock: { - selectedAccount: createMockInternalAccount({ address: '0x1234' }), - getBalanceOf: new BN(1), + messenger.publish( + 'TokensController:stateChange', + { + tokens: [], + detectedTokens: [], + ignoredTokens: [], + allDetectedTokens: {}, + allIgnoredTokens: {}, + allTokens: { + [chainId]: { + [accountAddress]: [ + { address: tokenAddress, decimals: 0, symbol: 'S' }, + ], + }, + }, }, - }); - - await controller.updateBalances(); + [], + ); - expect(controller.state.contractBalances).toStrictEqual({}); + await advanceTime({ 
clock, duration: 1 }); - controller.enable(); - await triggerTokensStateChange({ - ...getDefaultTokensState(), - tokens: [ - { - address: '0x00', - symbol: 'FOO', - decimals: 18, + expect(controller.state.tokenBalances).toStrictEqual({ + [accountAddress]: { + [chainId]: { + [tokenAddress]: toHex(balance), }, - ], - }); - - expect(controller.state.contractBalances).toStrictEqual({ - '0x00': toHex(new BN(1)), + }, }); }); - it('should not update balances if tokens change and controller is manually disabled', async () => { - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const { controller, triggerTokensStateChange } = setupController({ - config: { - disabled: false, - tokens: [{ address, decimals: 18, symbol: 'EOS', aggregators: [] }], - interval: 10, - }, - mock: { - selectedAccount: createMockInternalAccount({ address: '0x1234' }), - getBalanceOf: new BN(1), + it('removes balances when tokens are removed', async () => { + const chainId = '0x1'; + const accountAddress = '0x0000000000000000000000000000000000000000'; + const tokenAddress = '0x0000000000000000000000000000000000000001'; + + // Start with a token + const initialTokens = { + allDetectedTokens: {}, + allTokens: { + [chainId]: { + [accountAddress]: [ + { address: tokenAddress, symbol: 's', decimals: 0 }, + ], + }, }, - }); - - await controller.updateBalances(); + }; - expect(controller.state.contractBalances).toStrictEqual({ - '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), + const { controller, messenger } = setupController({ + tokens: initialTokens, }); - controller.disable(); - await triggerTokensStateChange({ - ...getDefaultTokensState(), - tokens: [ - { - address: '0x00', - symbol: 'FOO', - decimals: 18, - }, - ], - }); + // Set initial balance + const balance = 123456; + jest.spyOn(multicall, 'multicallOrFallback').mockResolvedValue([ + { + success: true, + value: new BN(balance), + }, + ]); - expect(controller.state.contractBalances).toStrictEqual({ - '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), - }); - }); + await controller._executePoll({ chainId }); - it('should clear previous interval', async () => { - const { controller } = setupController({ - config: { - interval: 1337, - }, - mock: { - selectedAccount: createMockInternalAccount({ address: '0x1234' }), - getBalanceOf: new BN(1), + // Verify initial balance is set + expect(controller.state.tokenBalances).toStrictEqual({ + [accountAddress]: { + [chainId]: { + [tokenAddress]: toHex(balance), + }, }, }); - const mockClearTimeout = jest.spyOn(global, 'clearTimeout'); - - await controller.poll(1338); + // Publish an update with no tokens + messenger.publish( + 'TokensController:stateChange', + { + tokens: [], + detectedTokens: [], + ignoredTokens: [], + allDetectedTokens: {}, + allIgnoredTokens: {}, + allTokens: { [chainId]: {} }, + }, + [], + ); - jest.advanceTimersByTime(1339); + await advanceTime({ clock, duration: 1 }); - expect(mockClearTimeout).toHaveBeenCalled(); + // Verify balance was removed + expect(controller.state.tokenBalances).toStrictEqual({ + [accountAddress]: { + [chainId]: {}, // Empty balances object + }, + }); }); - it('should update all balances', async () => { - const selectedAddress = '0x0000000000000000000000000000000000000001'; - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const tokens: Token[] = [ - { - address, - decimals: 18, - symbol: 'EOS', - aggregators: [], - }, - ]; - const { controller } = setupController({ - config: { - interval: 1337, - tokens, - }, - mock: { - 
selectedAccount: createMockInternalAccount({ - address: selectedAddress, - }), - getBalanceOf: new BN(1), + it('updates balances for all accounts when multi-account balances is enabled', async () => { + const chainId = '0x1'; + const account1 = '0x0000000000000000000000000000000000000001'; + const account2 = '0x0000000000000000000000000000000000000002'; + const tokenAddress = '0x0000000000000000000000000000000000000003'; + + const tokens = { + allDetectedTokens: {}, + allTokens: { + [chainId]: { + [account1]: [{ address: tokenAddress, symbol: 's', decimals: 0 }], + [account2]: [{ address: tokenAddress, symbol: 's', decimals: 0 }], + }, }, - }); + }; - expect(controller.state.contractBalances).toStrictEqual({}); + const { controller, messenger } = setupController({ tokens }); - await controller.updateBalances(); + // Enable multi account balances + messenger.publish( + 'PreferencesController:stateChange', + { isMultiAccountBalancesEnabled: true } as PreferencesState, + [], + ); - expect(tokens[0].hasBalanceError).toBe(false); - expect(Object.keys(controller.state.contractBalances)).toContain(address); - expect(controller.state.contractBalances[address]).not.toBe(toHex(0)); - }); + const balance1 = 100; + const balance2 = 200; + jest.spyOn(multicall, 'multicallOrFallback').mockResolvedValue([ + { success: true, value: new BN(balance1) }, + { success: true, value: new BN(balance2) }, + ]); - it('should handle `getERC20BalanceOf` error case', async () => { - const errorMsg = 'Failed to get balance'; - const address = '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0'; - const tokens: Token[] = [ - { - address, - decimals: 18, - symbol: 'EOS', - aggregators: [], - }, - ]; + await controller._executePoll({ chainId }); - const { controller, mockGetERC20BalanceOf } = setupController({ - config: { - interval: 1337, - tokens, + expect(controller.state.tokenBalances).toStrictEqual({ + [account1]: { + [chainId]: { + [tokenAddress]: toHex(balance1), + }, }, - mock: { - selectedAccount: createMockInternalAccount({ - address, - }), + [account2]: { + [chainId]: { + [tokenAddress]: toHex(balance2), + }, }, }); + }); - // @ts-expect-error Testing error case - mockGetERC20BalanceOf.mockReturnValueOnce(new Error(errorMsg)); - - expect(controller.state.contractBalances).toStrictEqual({}); - - await controller.updateBalances(); + it('only updates selected account balance when multi-account balances is disabled', async () => { + const chainId = '0x1'; + const selectedAccount = '0x0000000000000000000000000000000000000000'; + const otherAccount = '0x0000000000000000000000000000000000000001'; + const tokenAddress = '0x0000000000000000000000000000000000000002'; + + const tokens = { + allDetectedTokens: {}, + allTokens: { + [chainId]: { + [selectedAccount]: [ + { address: tokenAddress, symbol: 's', decimals: 0 }, + ], + [otherAccount]: [{ address: tokenAddress, symbol: 's', decimals: 0 }], + }, + }, + }; - expect(tokens[0].hasBalanceError).toBe(true); - expect(controller.state.contractBalances[address]).toBe(toHex(0)); + const { controller, messenger } = setupController({ tokens }); - mockGetERC20BalanceOf.mockReturnValueOnce(new BN(1)); - await controller.updateBalances(); + // Disable multi-account balances + messenger.publish( + 'PreferencesController:stateChange', + { isMultiAccountBalancesEnabled: false } as PreferencesState, + [], + ); - expect(tokens[0].hasBalanceError).toBe(false); - expect(Object.keys(controller.state.contractBalances)).toContain(address); - 
expect(controller.state.contractBalances[address]).not.toBe(0); - }); + const balance = 100; + jest + .spyOn(multicall, 'multicallOrFallback') + .mockResolvedValue([{ success: true, value: new BN(balance) }]); - it('should update balances when tokens change', async () => { - const { controller, triggerTokensStateChange } = setupController({ - config: { - interval: 1337, - }, - mock: { - selectedAccount: createMockInternalAccount({ - address: '0x1234', - }), - getBalanceOf: new BN(1), - }, - }); + await controller._executePoll({ chainId }); - const updateBalancesSpy = jest.spyOn(controller, 'updateBalances'); - - await triggerTokensStateChange({ - ...getDefaultTokensState(), - tokens: [ - { - address: '0x00', - symbol: 'FOO', - decimals: 18, + // Should only contain balance for selected account + expect(controller.state.tokenBalances).toStrictEqual({ + [selectedAccount]: { + [chainId]: { + [tokenAddress]: toHex(balance), }, - ], + }, }); - - expect(updateBalancesSpy).toHaveBeenCalled(); }); - it('should update token balances when detected tokens are added', async () => { - const { controller, triggerTokensStateChange } = setupController({ - config: { - interval: 1337, - }, - mock: { - selectedAccount: createMockInternalAccount({ - address: '0x1234', - }), - getBalanceOf: new BN(1), + it('removes balances when networks are deleted', async () => { + const chainId = '0x1'; + const accountAddress = '0x0000000000000000000000000000000000000000'; + const tokenAddress = '0x0000000000000000000000000000000000000001'; + + // Start with a token balance + const initialState = { + tokenBalances: { + [accountAddress]: { + [chainId]: { + [tokenAddress]: toHex(123456), + }, + }, }, + }; + + const { controller, messenger } = setupController({ + config: { state: initialState }, }); - expect(controller.state.contractBalances).toStrictEqual({}); + // Verify initial state matches + expect(controller.state.tokenBalances).toStrictEqual( + initialState.tokenBalances, + ); - await triggerTokensStateChange({ - ...getDefaultTokensState(), - detectedTokens: [ + // Simulate network deletion by publishing a network state change + messenger.publish( + 'NetworkController:stateChange', + { + networkConfigurationsByChainId: {}, + } as NetworkState, + [ { - address: '0x02', - decimals: 18, - image: undefined, - symbol: 'bar', - isERC721: false, + op: 'remove', + path: ['networkConfigurationsByChainId', chainId], }, ], - tokens: [], - }); + ); - expect(controller.state.contractBalances).toStrictEqual({ - '0x02': toHex(new BN(1)), - }); + // Verify the balances for the deleted network were removed + expect( + controller.state.tokenBalances[accountAddress][chainId], + ).toBeUndefined(); }); describe('resetState', () => { it('resets the state to default state', () => { const initialState: TokenBalancesControllerState = { - contractBalances: { - '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), + tokenBalances: { + '0x0000000000000000000000000000000000000001': { + '0x1': { + '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0': toHex(new BN(1)), + }, + }, }, }; const { controller } = setupController({ - config: { - state: initialState, - disabled: true, - }, - mock: { - selectedAccount: createMockInternalAccount({ address: '0x1234' }), - }, + config: { state: initialState }, }); expect(controller.state).toStrictEqual(initialState); @@ -482,7 +473,7 @@ describe('TokenBalancesController', () => { controller.resetState(); expect(controller.state).toStrictEqual({ - contractBalances: {}, + tokenBalances: {}, }); }); }); diff 
--git a/packages/assets-controllers/src/TokenBalancesController.ts b/packages/assets-controllers/src/TokenBalancesController.ts index 0c0cb505ef..96c89392ac 100644 --- a/packages/assets-controllers/src/TokenBalancesController.ts +++ b/packages/assets-controllers/src/TokenBalancesController.ts @@ -1,49 +1,70 @@ +import { Contract } from '@ethersproject/contracts'; +import { Web3Provider } from '@ethersproject/providers'; import type { AccountsControllerGetSelectedAccountAction } from '@metamask/accounts-controller'; import type { RestrictedControllerMessenger, ControllerGetStateAction, ControllerStateChangeEvent, } from '@metamask/base-controller'; -import { BaseController } from '@metamask/base-controller'; -import { safelyExecute, toHex } from '@metamask/controller-utils'; - -import type { AssetsContractControllerGetERC20BalanceOfAction } from './AssetsContractController'; +import { toChecksumHexAddress, toHex } from '@metamask/controller-utils'; +import { abiERC20 } from '@metamask/metamask-eth-abis'; +import type { + NetworkControllerGetNetworkClientByIdAction, + NetworkControllerGetStateAction, + NetworkControllerStateChangeEvent, + NetworkState, +} from '@metamask/network-controller'; +import { StaticIntervalPollingController } from '@metamask/polling-controller'; +import type { + PreferencesControllerGetStateAction, + PreferencesControllerStateChangeEvent, + PreferencesState, +} from '@metamask/preferences-controller'; +import type { Hex } from '@metamask/utils'; +import type BN from 'bn.js'; +import type { Patch } from 'immer'; +import { isEqual } from 'lodash'; + +import type { MulticallResult } from './multicall'; +import { multicallOrFallback } from './multicall'; import type { Token } from './TokenRatesController'; -import type { TokensControllerStateChangeEvent } from './TokensController'; +import type { + TokensControllerGetStateAction, + TokensControllerState, + TokensControllerStateChangeEvent, +} from './TokensController'; const DEFAULT_INTERVAL = 180000; const controllerName = 'TokenBalancesController'; const metadata = { - contractBalances: { persist: true, anonymous: false }, + tokenBalances: { persist: true, anonymous: false }, }; /** * Token balances controller options * @property interval - Polling interval used to fetch new token balances. - * @property tokens - List of tokens to track balances for. - * @property disabled - If set to true, all tracked tokens contract balances updates are blocked. + * @property messenger - A controller messenger. + * @property state - Initial state for the controller. */ type TokenBalancesControllerOptions = { interval?: number; - tokens?: Token[]; - disabled?: boolean; messenger: TokenBalancesControllerMessenger; state?: Partial; }; /** - * Represents a mapping of hash token contract addresses to their balances. + * A mapping from account address to chain id to token address to balance. */ -type ContractBalances = Record; +type TokenBalances = Record>>; /** * Token balances controller state - * @property contractBalances - Hash of token contract addresses to balances + * @property tokenBalances - A mapping from account address to chain id to token address to balance. 
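+ * @example
+ * A minimal sketch of the nested shape, using hypothetical account and token
+ * addresses and a hex-encoded raw balance:
+ * {
+ *   '0x0000000000000000000000000000000000000001': {
+ *     '0x1': {
+ *       '0x0000000000000000000000000000000000000002': '0x1bc16d674ec80000',
+ *     },
+ *   },
+ * }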
*/ export type TokenBalancesControllerState = { - contractBalances: ContractBalances; + tokenBalances: TokenBalances; }; export type TokenBalancesControllerGetStateAction = ControllerGetStateAction< @@ -55,8 +76,11 @@ export type TokenBalancesControllerActions = TokenBalancesControllerGetStateAction; export type AllowedActions = - | AccountsControllerGetSelectedAccountAction - | AssetsContractControllerGetERC20BalanceOfAction; + | NetworkControllerGetNetworkClientByIdAction + | NetworkControllerGetStateAction + | TokensControllerGetStateAction + | PreferencesControllerGetStateAction + | AccountsControllerGetSelectedAccountAction; export type TokenBalancesControllerStateChangeEvent = ControllerStateChangeEvent< @@ -67,7 +91,10 @@ export type TokenBalancesControllerStateChangeEvent = export type TokenBalancesControllerEvents = TokenBalancesControllerStateChangeEvent; -export type AllowedEvents = TokensControllerStateChangeEvent; +export type AllowedEvents = + | TokensControllerStateChangeEvent + | PreferencesControllerStateChangeEvent + | NetworkControllerStateChangeEvent; export type TokenBalancesControllerMessenger = RestrictedControllerMessenger< typeof controllerName, @@ -84,41 +111,40 @@ export type TokenBalancesControllerMessenger = RestrictedControllerMessenger< */ export function getDefaultTokenBalancesState(): TokenBalancesControllerState { return { - contractBalances: {}, + tokenBalances: {}, }; } +/** The input to start polling for the {@link TokenBalancesController} */ +export type TokenBalancesPollingInput = { + chainId: Hex; +}; + /** * Controller that passively polls on a set interval token balances * for tokens stored in the TokensController */ -export class TokenBalancesController extends BaseController< +export class TokenBalancesController extends StaticIntervalPollingController()< typeof controllerName, TokenBalancesControllerState, TokenBalancesControllerMessenger > { - #handle?: ReturnType; + #queryMultipleAccounts: boolean; - #interval: number; + #allTokens: TokensControllerState['allTokens']; - #tokens: Token[]; - - #disabled: boolean; + #allDetectedTokens: TokensControllerState['allDetectedTokens']; /** * Construct a Token Balances Controller. * * @param options - The controller options. * @param options.interval - Polling interval used to fetch new token balances. - * @param options.tokens - List of tokens to track balances for. - * @param options.disabled - If set to true, all tracked tokens contract balances updates are blocked. * @param options.state - Initial state to set on this controller. * @param options.messenger - The controller restricted messenger. 
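+ * @example
+ * A minimal construction sketch. The `messenger` below is assumed to be a
+ * TokenBalancesControllerMessenger already restricted by the client, and the
+ * interval is arbitrary:
+ * const controller = new TokenBalancesController({ messenger, interval: 30_000 });
+ * // Per-chain polling then uses a TokenBalancesPollingInput, assuming the
+ * // standard polling-controller API:
+ * controller.startPolling({ chainId: '0x1' });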
*/ constructor({ interval = DEFAULT_INTERVAL, - tokens = [], - disabled = false, messenger, state = {}, }: TokenBalancesControllerOptions) { @@ -132,92 +158,222 @@ export class TokenBalancesController extends BaseController< }, }); - this.#disabled = disabled; - this.#interval = interval; - this.#tokens = tokens; + this.setIntervalLength(interval); + + // Set initial preference for querying multiple accounts, and subscribe to changes + this.#queryMultipleAccounts = this.#calculateQueryMultipleAccounts( + this.messagingSystem.call('PreferencesController:getState'), + ); + this.messagingSystem.subscribe( + 'PreferencesController:stateChange', + this.#onPreferencesStateChange.bind(this), + ); + + // Set initial tokens, and subscribe to changes + ({ + allTokens: this.#allTokens, + allDetectedTokens: this.#allDetectedTokens, + } = this.messagingSystem.call('TokensController:getState')); this.messagingSystem.subscribe( 'TokensController:stateChange', - ({ tokens: newTokens, detectedTokens }) => { - this.#tokens = [...newTokens, ...detectedTokens]; - // TODO: Either fix this lint violation or explain why it's necessary to ignore. - // eslint-disable-next-line @typescript-eslint/no-floating-promises - this.updateBalances(); - }, + this.#onTokensStateChange.bind(this), ); - // TODO: Either fix this lint violation or explain why it's necessary to ignore. - // eslint-disable-next-line @typescript-eslint/no-floating-promises - this.poll(); + // Subscribe to network state changes + this.messagingSystem.subscribe( + 'NetworkController:stateChange', + this.#onNetworkStateChange.bind(this), + ); } /** - * Allows controller to update tracked tokens contract balances. + * Determines whether to query all accounts, or just the selected account. + * @param preferences - The preferences state. + * @param preferences.isMultiAccountBalancesEnabled - whether to query all accounts (mobile). + * @param preferences.useMultiAccountBalanceChecker - whether to query all accounts (extension). + * @returns true if all accounts should be queried. */ - enable() { - this.#disabled = false; - } + #calculateQueryMultipleAccounts = ({ + isMultiAccountBalancesEnabled, + useMultiAccountBalanceChecker, + }: PreferencesState & { useMultiAccountBalanceChecker?: boolean }) => { + return Boolean( + // Note: These settings have different names on extension vs mobile + isMultiAccountBalancesEnabled || useMultiAccountBalanceChecker, + ); + }; /** - * Blocks controller from updating tracked tokens contract balances. + * Handles the event for preferences state changes. + * @param preferences - The preferences state. */ - disable() { - this.#disabled = true; - } + #onPreferencesStateChange = (preferences: PreferencesState) => { + // Update the user preference for whether to query multiple accounts. + const queryMultipleAccounts = + this.#calculateQueryMultipleAccounts(preferences); + + // Refresh when flipped off -> on + const refresh = queryMultipleAccounts && !this.#queryMultipleAccounts; + this.#queryMultipleAccounts = queryMultipleAccounts; + + if (refresh) { + this.updateBalances().catch(console.error); + } + }; /** - * Starts a new polling interval. - * - * @param interval - Polling interval used to fetch new token balances. + * Handles the event for tokens state changes. + * @param state - The token state. + * @param state.allTokens - The state for imported tokens across all chains. + * @param state.allDetectedTokens - The state for detected tokens across all chains. 
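+ * @example
+ * Sketch of the intended behaviour: if only the '0x89' entries of `allTokens` or
+ * `allDetectedTokens` differ from the cached copies, only that chain is refreshed,
+ * via updateBalances({ chainIds: ['0x89'] }).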
*/ - async poll(interval?: number): Promise { - if (interval) { - this.#interval = interval; - } + #onTokensStateChange = ({ + allTokens, + allDetectedTokens, + }: TokensControllerState) => { + // Refresh token balances on chains whose tokens have changed. + const chainIds = this.#getChainIds(allTokens, allDetectedTokens); + const chainIdsToUpdate = chainIds.filter( + (chainId) => + !isEqual(this.#allTokens[chainId], allTokens[chainId]) || + !isEqual(this.#allDetectedTokens[chainId], allDetectedTokens[chainId]), + ); + + this.#allTokens = allTokens; + this.#allDetectedTokens = allDetectedTokens; - if (this.#handle) { - clearTimeout(this.#handle); + this.updateBalances({ chainIds: chainIdsToUpdate }).catch(console.error); + }; + + /** + * Handles the event for network state changes. + * @param _ - The network state. + * @param patches - An array of patch operations performed on the network state. + */ + #onNetworkStateChange(_: NetworkState, patches: Patch[]) { + // Remove state for deleted networks + for (const patch of patches) { + if ( + patch.op === 'remove' && + patch.path[0] === 'networkConfigurationsByChainId' + ) { + const removedChainId = patch.path[1] as Hex; + + this.update((state) => { + for (const accountAddress of Object.keys(state.tokenBalances)) { + delete state.tokenBalances[accountAddress as Hex][removedChainId]; + } + }); + } } + } - await safelyExecute(() => this.updateBalances()); + /** + * Returns an array of chain ids that have tokens. + * @param allTokens - The state for imported tokens across all chains. + * @param allDetectedTokens - The state for detected tokens across all chains. + * @returns An array of chain ids that have tokens. + */ + #getChainIds = ( + allTokens: TokensControllerState['allTokens'], + allDetectedTokens: TokensControllerState['allDetectedTokens'], + ) => + [ + ...new Set([ + ...Object.keys(allTokens), + ...Object.keys(allDetectedTokens), + ]), + ] as Hex[]; - this.#handle = setTimeout(() => { - // TODO: Either fix this lint violation or explain why it's necessary to ignore. - // eslint-disable-next-line @typescript-eslint/no-floating-promises - this.poll(this.#interval); - }, this.#interval); + /** + * Polls for erc20 token balances. + * @param input - The input for the poll. + * @param input.chainId - The chain id to poll token balances on. + */ + async _executePoll({ chainId }: TokenBalancesPollingInput) { + await this.updateBalancesByChainId({ chainId }); } /** - * Updates balances for all tokens. + * Updates the token balances for the given chain ids. + * @param input - The input for the update. + * @param input.chainIds - The chain ids to update token balances for. + * Or omitted to update all chains that contain tokens. */ - async updateBalances() { - if (this.#disabled) { - return; - } - const selectedInternalAccount = this.messagingSystem.call( + async updateBalances({ chainIds }: { chainIds?: Hex[] } = {}) { + chainIds ??= this.#getChainIds(this.#allTokens, this.#allDetectedTokens); + + await Promise.allSettled( + chainIds.map((chainId) => this.updateBalancesByChainId({ chainId })), + ); + } + + /** + * Updates token balances for the given chain id. + * @param input - The input for the update. + * @param input.chainId - The chain id to update token balances on. 
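+ * @example
+ * For example, to refresh every tracked token balance on mainnet only:
+ * await controller.updateBalancesByChainId({ chainId: '0x1' });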
+ */ + async updateBalancesByChainId({ chainId }: { chainId: Hex }) { + const { address: selectedAccountAddress } = this.messagingSystem.call( 'AccountsController:getSelectedAccount', ); - const newContractBalances: ContractBalances = {}; - for (const token of this.#tokens) { - const { address } = token; - try { - const balance = await this.messagingSystem.call( - 'AssetsContractController:getERC20BalanceOf', - address, - selectedInternalAccount.address, - ); - newContractBalances[address] = toHex(balance); - token.hasBalanceError = false; - } catch (error) { - newContractBalances[address] = toHex(0); - token.hasBalanceError = true; - } + const isSelectedAccount = (accountAddress: string) => + toChecksumHexAddress(accountAddress) === + toChecksumHexAddress(selectedAccountAddress); + + const accountTokenPairs: { accountAddress: Hex; tokenAddress: Hex }[] = []; + + const addTokens = ([accountAddress, tokens]: [string, Token[]]) => + this.#queryMultipleAccounts || isSelectedAccount(accountAddress) + ? tokens.forEach((t) => + accountTokenPairs.push({ + accountAddress: accountAddress as Hex, + tokenAddress: t.address as Hex, + }), + ) + : undefined; + + // Balances will be updated for both imported and detected tokens + Object.entries(this.#allTokens[chainId] ?? {}).forEach(addTokens); + Object.entries(this.#allDetectedTokens[chainId] ?? {}).forEach(addTokens); + + let results: MulticallResult[] = []; + + if (accountTokenPairs.length > 0) { + const provider = new Web3Provider( + this.#getNetworkClient(chainId).provider, + ); + + const calls = accountTokenPairs.map( + ({ accountAddress, tokenAddress }) => ({ + contract: new Contract(tokenAddress, abiERC20, provider), + functionSignature: 'balanceOf(address)', + arguments: [accountAddress], + }), + ); + + results = await multicallOrFallback(calls, chainId, provider); } this.update((state) => { - state.contractBalances = newContractBalances; + // Reset so that when accounts or tokens are removed, + // their balances are removed rather than left stale. + for (const accountAddress of Object.keys(state.tokenBalances)) { + state.tokenBalances[accountAddress as Hex][chainId] = {}; + } + + for (let i = 0; i < results.length; i++) { + const { success, value } = results[i]; + const { accountAddress, tokenAddress } = accountTokenPairs[i]; + + if (success) { + ((state.tokenBalances[accountAddress] ??= {})[chainId] ??= {})[ + tokenAddress + ] = toHex(value as BN); + } + } }); } @@ -229,6 +385,34 @@ export class TokenBalancesController extends BaseController< return getDefaultTokenBalancesState(); }); } + + /** + * Returns the network client for a given chain id + * @param chainId - The chain id to get the network client for. + * @returns The network client for the given chain id. 
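+ * @throws If no network configuration exists for the given chain id.
+ * @example
+ * A sketch of internal usage, mirroring updateBalancesByChainId above:
+ * const { provider } = this.#getNetworkClient('0x1');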
+ */ + #getNetworkClient(chainId: Hex) { + const { networkConfigurationsByChainId } = this.messagingSystem.call( + 'NetworkController:getState', + ); + + const networkConfiguration = networkConfigurationsByChainId[chainId]; + if (!networkConfiguration) { + throw new Error( + `TokenBalancesController: No network configuration found for chainId ${chainId}`, + ); + } + + const { networkClientId } = + networkConfiguration.rpcEndpoints[ + networkConfiguration.defaultRpcEndpointIndex + ]; + + return this.messagingSystem.call( + `NetworkController:getNetworkClientById`, + networkClientId, + ); + } } export default TokenBalancesController; diff --git a/packages/assets-controllers/src/TokenDetectionController.test.ts b/packages/assets-controllers/src/TokenDetectionController.test.ts index 31c82807bb..6a1a69e5ea 100644 --- a/packages/assets-controllers/src/TokenDetectionController.test.ts +++ b/packages/assets-controllers/src/TokenDetectionController.test.ts @@ -8,7 +8,10 @@ import { } from '@metamask/controller-utils'; import type { InternalAccount } from '@metamask/keyring-api'; import type { KeyringControllerState } from '@metamask/keyring-controller'; -import { getDefaultNetworkControllerState } from '@metamask/network-controller'; +import { + getDefaultNetworkControllerState, + RpcEndpointType, +} from '@metamask/network-controller'; import type { NetworkState, NetworkConfiguration, @@ -1014,6 +1017,7 @@ describe('TokenDetectionController', () => { async ({ mockGetAccount, mockTokenListGetState, + mockNetworkState, triggerPreferencesStateChange, triggerSelectedAccountChange, callActionSpy, @@ -1038,6 +1042,26 @@ describe('TokenDetectionController', () => { }, }, }); + mockNetworkState({ + networkConfigurationsByChainId: { + '0x1': { + name: 'ethereum', + nativeCurrency: 'ETH', + rpcEndpoints: [ + { + networkClientId: 'mainnet', + type: RpcEndpointType.Infura, + url: 'https://mainnet.infura.io/v3/{infuraProjectId}', + }, + ], + blockExplorerUrls: [], + chainId: '0x1', + defaultRpcEndpointIndex: 0, + }, + }, + networksMetadata: {}, + selectedNetworkClientId: 'mainnet', + }); triggerPreferencesStateChange({ ...getDefaultPreferencesState(), @@ -1059,6 +1083,85 @@ describe('TokenDetectionController', () => { ); }); + it('should detect new tokens after switching between accounts on different chains', async () => { + const mockGetBalancesInSingleCall = jest.fn().mockResolvedValue({ + [sampleTokenA.address]: new BN(1), + }); + const firstSelectedAccount = createMockInternalAccount({ + address: '0x0000000000000000000000000000000000000001', + }); + const secondSelectedAccount = createMockInternalAccount({ + address: '0x0000000000000000000000000000000000000002', + }); + await withController( + { + options: { + disabled: false, + getBalancesInSingleCall: mockGetBalancesInSingleCall, + useAccountsAPI: true, // USING ACCOUNTS API + }, + mocks: { + getSelectedAccount: firstSelectedAccount, + }, + }, + async ({ + mockGetAccount, + mockTokenListGetState, + mockNetworkState, + triggerPreferencesStateChange, + triggerSelectedAccountChange, + controller, + }) => { + const mockTokens = jest.spyOn(controller, 'detectTokens'); + mockMultiChainAccountsService(); + mockTokenListGetState({ + ...getDefaultTokenListState(), + tokensChainsCache: { + '0x1': { + timestamp: 0, + data: { + [sampleTokenA.address]: { + name: sampleTokenA.name, + symbol: sampleTokenA.symbol, + decimals: sampleTokenA.decimals, + address: sampleTokenA.address, + occurrences: 1, + aggregators: sampleTokenA.aggregators, + iconUrl: sampleTokenA.image, 
+ }, + }, + }, + }, + }); + mockNetworkState({ + ...getDefaultNetworkControllerState(), + selectedNetworkClientId: NetworkType.mainnet, + }); + + triggerPreferencesStateChange({ + ...getDefaultPreferencesState(), + useTokenDetection: true, + }); + mockGetAccount(secondSelectedAccount); + triggerSelectedAccountChange(secondSelectedAccount); + + await advanceTime({ clock, duration: 1 }); + + expect(mockTokens).toHaveBeenNthCalledWith(1, { + chainIds: [ + '0x1', + '0x5', + '0xaa36a7', + '0xe704', + '0xe705', + '0xe708', + ], + selectedAddress: secondSelectedAccount.address, + }); + }, + ); + }); + it('should detect new tokens after enabling token detection', async () => { const mockGetBalancesInSingleCall = jest.fn().mockResolvedValue({ [sampleTokenA.address]: new BN(1), diff --git a/packages/assets-controllers/src/TokenDetectionController.ts b/packages/assets-controllers/src/TokenDetectionController.ts index a2d9a744c1..031fcd4ec4 100644 --- a/packages/assets-controllers/src/TokenDetectionController.ts +++ b/packages/assets-controllers/src/TokenDetectionController.ts @@ -394,12 +394,18 @@ export class TokenDetectionController extends StaticIntervalPollingController { + const { networkConfigurationsByChainId } = this.messagingSystem.call( + 'NetworkController:getState', + ); + + const chainIds = Object.keys(networkConfigurationsByChainId) as Hex[]; const isSelectedAccountIdChanged = this.#selectedAccountId !== selectedAccount.id; if (isSelectedAccountIdChanged) { this.#selectedAccountId = selectedAccount.id; await this.#restartTokenDetection({ selectedAddress: selectedAccount.address, + chainIds, }); } }, @@ -707,20 +713,14 @@ export class TokenDetectionController extends StaticIntervalPollingController null); - if ( - !tokenBalancesByChain || - Object.keys(tokenBalancesByChain).length === 0 - ) { + if (tokenBalancesByChain === null) { return { result: 'failed' } as const; } diff --git a/packages/assets-controllers/src/TokenRatesController.test.ts b/packages/assets-controllers/src/TokenRatesController.test.ts index ad1fccd4eb..4648151769 100644 --- a/packages/assets-controllers/src/TokenRatesController.test.ts +++ b/packages/assets-controllers/src/TokenRatesController.test.ts @@ -17,6 +17,7 @@ import { getDefaultNetworkControllerState } from '@metamask/network-controller'; import type { Hex } from '@metamask/utils'; import { add0x } from '@metamask/utils'; import assert from 'assert'; +import type { Patch } from 'immer'; import nock from 'nock'; import { useFakeTimers } from 'sinon'; @@ -1035,6 +1036,94 @@ describe('TokenRatesController', () => { ); }); }); + + it('removes state when networks are deleted', async () => { + const marketData = { + [ChainId.mainnet]: { + '0x123456': { + currency: 'ETH', + priceChange1d: 0, + pricePercentChange1d: 0, + tokenAddress: '0x02', + allTimeHigh: 4000, + allTimeLow: 900, + circulatingSupply: 2000, + dilutedMarketCap: 100, + high1d: 200, + low1d: 100, + marketCap: 1000, + marketCapPercentChange1d: 100, + price: 0.001, + pricePercentChange14d: 100, + pricePercentChange1h: 1, + pricePercentChange1y: 200, + pricePercentChange200d: 300, + pricePercentChange30d: 200, + pricePercentChange7d: 100, + totalVolume: 100, + }, + }, + [ChainId['linea-mainnet']]: { + '0x789': { + currency: 'ETH', + priceChange1d: 0, + pricePercentChange1d: 0, + tokenAddress: '0x02', + allTimeHigh: 4000, + allTimeLow: 900, + circulatingSupply: 2000, + dilutedMarketCap: 100, + high1d: 200, + low1d: 100, + marketCap: 1000, + marketCapPercentChange1d: 100, + price: 0.001, + 
pricePercentChange14d: 100, + pricePercentChange1h: 1, + pricePercentChange1y: 200, + pricePercentChange200d: 300, + pricePercentChange30d: 200, + pricePercentChange7d: 100, + totalVolume: 100, + }, + }, + } as const; + + await withController( + { + options: { + state: { + marketData, + }, + }, + }, + async ({ controller, triggerNetworkStateChange }) => { + // Verify initial state with both networks + expect(controller.state.marketData).toStrictEqual(marketData); + + triggerNetworkStateChange( + { + selectedNetworkClientId: 'mainnet', + networkConfigurationsByChainId: {}, + } as NetworkState, + [ + { + op: 'remove', + path: [ + 'networkConfigurationsByChainId', + ChainId['linea-mainnet'], + ], + }, + ], + ); + + // Verify linea removed + expect(controller.state.marketData).toStrictEqual({ + [ChainId.mainnet]: marketData[ChainId.mainnet], + }); + }, + ); + }); }); describe('PreferencesController::stateChange', () => { @@ -2259,6 +2348,55 @@ describe('TokenRatesController', () => { ); }); + it('correctly calls the Price API with unqiue native token addresses (e.g. MATIC)', async () => { + const tokenPricesService = buildMockTokenPricesService({ + fetchTokenPrices: jest.fn().mockResolvedValue({ + '0x0000000000000000000000000000000000001010': { + currency: 'MATIC', + tokenAddress: '0x0000000000000000000000000000000000001010', + value: 0.001, + }, + }), + }); + + await withController( + { + options: { tokenPricesService }, + mockNetworkClientConfigurationsByNetworkClientId: { + 'AAAA-BBBB-CCCC-DDDD': buildCustomNetworkClientConfiguration({ + chainId: '0x89', + }), + }, + }, + async ({ + controller, + triggerTokensStateChange, + triggerNetworkStateChange, + }) => { + await callUpdateExchangeRatesMethod({ + allTokens: { + '0x89': { + [defaultSelectedAddress]: [], + }, + }, + chainId: '0x89', + controller, + triggerTokensStateChange, + triggerNetworkStateChange, + method, + nativeCurrency: 'MATIC', + selectedNetworkClientId: 'AAAA-BBBB-CCCC-DDDD', + }); + + expect( + controller.state.marketData['0x89'][ + '0x0000000000000000000000000000000000001010' + ], + ).toBeDefined(); + }, + ); + }); + it('only updates rates once when called twice', async () => { const tokenAddresses = [ '0x0000000000000000000000000000000000000001', @@ -2410,7 +2548,7 @@ type WithControllerCallback = ({ controller: TokenRatesController; triggerSelectedAccountChange: (state: InternalAccount) => void; triggerTokensStateChange: (state: TokensControllerState) => void; - triggerNetworkStateChange: (state: NetworkState) => void; + triggerNetworkStateChange: (state: NetworkState, patches?: Patch[]) => void; }) => Promise | ReturnValue; type WithControllerOptions = { @@ -2507,8 +2645,15 @@ async function withController( triggerTokensStateChange: (state: TokensControllerState) => { controllerMessenger.publish('TokensController:stateChange', state, []); }, - triggerNetworkStateChange: (state: NetworkState) => { - controllerMessenger.publish('NetworkController:stateChange', state, []); + triggerNetworkStateChange: ( + state: NetworkState, + patches: Patch[] = [], + ) => { + controllerMessenger.publish( + 'NetworkController:stateChange', + state, + patches, + ); }, }); } finally { diff --git a/packages/assets-controllers/src/TokenRatesController.ts b/packages/assets-controllers/src/TokenRatesController.ts index 57d138bc1a..50ffd583c9 100644 --- a/packages/assets-controllers/src/TokenRatesController.ts +++ b/packages/assets-controllers/src/TokenRatesController.ts @@ -27,7 +27,7 @@ import { isEqual } from 'lodash'; import { 
reduceInBatchesSerially, TOKEN_PRICES_BATCH_SIZE } from './assetsUtil'; import { fetchExchangeRate as fetchNativeCurrencyExchangeRate } from './crypto-compare-service'; import type { AbstractTokenPricesService } from './token-prices-service/abstract-token-prices-service'; -import { ZERO_ADDRESS } from './token-prices-service/codefi-v2'; +import { getNativeTokenAddress } from './token-prices-service/codefi-v2'; import type { TokensControllerGetStateAction, TokensControllerStateChangeEvent, @@ -365,7 +365,7 @@ export class TokenRatesController extends StaticIntervalPollingController { + async ({ selectedNetworkClientId }, patches) => { const { configuration: { chainId, ticker }, } = this.messagingSystem.call( @@ -380,6 +380,19 @@ export class TokenRatesController extends StaticIntervalPollingController { + delete state.marketData[removedChainId]; + }); + } + } }, ); } @@ -705,9 +718,9 @@ export class TokenRatesController extends StaticIntervalPollingController { }, ); }); + + it('should ignore tokens by networkClientId', async () => { + const selectedAddress = '0x0001'; + const otherAddress = '0x0002'; + const selectedAccount = createMockInternalAccount({ + address: selectedAddress, + }); + const otherAccount = createMockInternalAccount({ + address: otherAddress, + }); + + await withController( + { + mocks: { + getSelectedAccount: selectedAccount, + getAccount: selectedAccount, + }, + }, + async ({ controller, triggerSelectedAccountChange, changeNetwork }) => { + // Select the first account + triggerSelectedAccountChange(selectedAccount); + + // Add and ignore a token on Sepolia + changeNetwork({ selectedNetworkClientId: InfuraNetworkType.sepolia }); + await controller.addToken({ + address: '0x01', + symbol: 'Token1', + decimals: 18, + }); + expect(controller.state.tokens).toHaveLength(1); + expect(controller.state.ignoredTokens).toHaveLength(0); + + controller.ignoreTokens(['0x01'], InfuraNetworkType.sepolia); + expect(controller.state.tokens).toHaveLength(0); + expect(controller.state.ignoredTokens).toStrictEqual(['0x01']); + + // Verify that Goerli network has no ignored tokens + changeNetwork({ selectedNetworkClientId: InfuraNetworkType.goerli }); + expect(controller.state.ignoredTokens).toHaveLength(0); + + // Add and ignore a token on Goerli + await controller.addToken({ + address: '0x02', + symbol: 'Token2', + decimals: 8, + }); + controller.ignoreTokens(['0x02'], InfuraNetworkType.goerli); + expect(controller.state.tokens).toHaveLength(0); + expect(controller.state.ignoredTokens).toStrictEqual(['0x02']); + + // Verify that switching back to Sepolia retains its ignored tokens + changeNetwork({ selectedNetworkClientId: InfuraNetworkType.sepolia }); + expect(controller.state.ignoredTokens).toStrictEqual(['0x01']); + + // Switch to a different account on Goerli + triggerSelectedAccountChange(otherAccount); + expect(controller.state.ignoredTokens).toHaveLength(0); + + // Add and ignore a token on the new account + await controller.addToken({ + address: '0x03', + symbol: 'Token3', + decimals: 6, + }); + controller.ignoreTokens(['0x03'], InfuraNetworkType.goerli); + expect(controller.state.ignoredTokens).toStrictEqual(['0x03']); + + // Validate the overall ignored tokens state + expect(controller.state.allIgnoredTokens).toStrictEqual({ + [ChainId.sepolia]: { + [selectedAddress]: ['0x01'], + }, + [ChainId.goerli]: { + [selectedAddress]: ['0x02'], + [otherAddress]: ['0x03'], + }, + }); + }, + ); + }); }); it('should ignore multiple tokens with single ignoreTokens call', async () => { @@ 
-2447,6 +2529,64 @@ describe('TokensController', () => { }); }); + describe('when NetworkController:stateChange is published', () => { + it('removes tokens for removed networks', async () => { + const initialState = { + allTokens: { + '0x1': { + '0x134': [ + { + address: '0x01', + symbol: 'TKN1', + decimals: 18, + aggregators: [], + name: 'Token 1', + }, + ], + }, + '0x5': { + // goerli + '0x456': [ + { + address: '0x02', + symbol: 'TKN2', + decimals: 18, + aggregators: [], + name: 'Token 2', + }, + ], + }, + }, + tokens: [], + ignoredTokens: [], + detectedTokens: [], + allIgnoredTokens: {}, + allDetectedTokens: {}, + }; + + await withController( + { options: { state: initialState } }, + async ({ controller, triggerNetworkStateChange }) => { + // Verify initial state + expect(controller.state).toStrictEqual(initialState); + + // Simulate removing goerli + triggerNetworkStateChange({} as NetworkState, [ + { + op: 'remove', + path: ['networkConfigurationsByChainId', '0x5'], + }, + ]); + + // Verify tokens were removed on goerli + expect(controller.state.allTokens).toStrictEqual({ + '0x1': initialState.allTokens['0x1'], + }); + }, + ); + }); + }); + describe('resetState', () => { it('resets the state to default state', async () => { const initialState: TokensControllerState = { @@ -2544,6 +2684,10 @@ type WithControllerCallback = ({ messenger: UnrestrictedMessenger; approvalController: ApprovalController; triggerSelectedAccountChange: (internalAccount: InternalAccount) => void; + triggerNetworkStateChange: ( + networkState: NetworkState, + patches: Patch[], + ) => void; getAccountHandler: jest.Mock; getSelectedAccountHandler: jest.Mock; }) => Promise | ReturnValue; @@ -2615,6 +2759,7 @@ async function withController( ], allowedEvents: [ 'NetworkController:networkDidChange', + 'NetworkController:stateChange', 'AccountsController:selectedEvmAccountChange', 'TokenListController:stateChange', ], @@ -2674,12 +2819,20 @@ async function withController( getNetworkClientById, ); + const triggerNetworkStateChange = ( + networkState: NetworkState, + patches: Patch[], + ) => { + messenger.publish('NetworkController:stateChange', networkState, patches); + }; + return await fn({ controller, changeNetwork, messenger, approvalController, triggerSelectedAccountChange, + triggerNetworkStateChange, getAccountHandler, getSelectedAccountHandler, }); diff --git a/packages/assets-controllers/src/TokensController.ts b/packages/assets-controllers/src/TokensController.ts index ba94bb2fb3..d687bf2e51 100644 --- a/packages/assets-controllers/src/TokensController.ts +++ b/packages/assets-controllers/src/TokensController.ts @@ -30,12 +30,14 @@ import type { NetworkClientId, NetworkControllerGetNetworkClientByIdAction, NetworkControllerNetworkDidChangeEvent, + NetworkControllerStateChangeEvent, NetworkState, Provider, } from '@metamask/network-controller'; import { rpcErrors } from '@metamask/rpc-errors'; import type { Hex } from '@metamask/utils'; import { Mutex } from 'async-mutex'; +import type { Patch } from 'immer'; import { v1 as random } from 'uuid'; import { formatAggregatorNames, formatIconUrlWithProxy } from './assetsUtil'; @@ -150,6 +152,7 @@ export type TokensControllerStateChangeEvent = ControllerStateChangeEvent< export type TokensControllerEvents = TokensControllerStateChangeEvent; export type AllowedEvents = + | NetworkControllerStateChangeEvent | NetworkControllerNetworkDidChangeEvent | TokenListStateChange | AccountsControllerSelectedEvmAccountChangeEvent; @@ -246,6 +249,11 @@ export class 
TokensController extends BaseController< this.#onNetworkDidChange.bind(this), ); + this.messagingSystem.subscribe( + 'NetworkController:stateChange', + this.#onNetworkStateChange.bind(this), + ); + this.messagingSystem.subscribe( 'TokenListController:stateChange', ({ tokenList }) => { @@ -283,6 +291,29 @@ export class TokensController extends BaseController< }); } + /** + * Handles the event when the network state changes. + * @param _ - The network state. + * @param patches - An array of patch operations performed on the network state. + */ + #onNetworkStateChange(_: NetworkState, patches: Patch[]) { + // Remove state for deleted networks + for (const patch of patches) { + if ( + patch.op === 'remove' && + patch.path[0] === 'networkConfigurationsByChainId' + ) { + const removedChainId = patch.path[1] as Hex; + + this.update((state) => { + delete state.allTokens[removedChainId]; + delete state.allIgnoredTokens[removedChainId]; + delete state.allDetectedTokens[removedChainId]; + }); + } + } + } + /** * Handles the selected account change in the accounts controller. * @param selectedAccount - The new selected account @@ -462,16 +493,27 @@ export class TokensController extends BaseController< */ async addTokens(tokensToImport: Token[], networkClientId?: NetworkClientId) { const releaseLock = await this.#mutex.acquire(); - const { ignoredTokens, allDetectedTokens } = this.state; + const { allTokens, ignoredTokens, allDetectedTokens } = this.state; const importedTokensMap: { [key: string]: true } = {}; + + let interactingChainId; + if (networkClientId) { + interactingChainId = this.messagingSystem.call( + 'NetworkController:getNetworkClientById', + networkClientId, + ).configuration.chainId; + } + // Used later to dedupe imported tokens - const newTokensMap = Object.values(tokensToImport).reduce( - (output, token) => { - output[token.address] = token; - return output; - }, - {} as { [address: string]: Token }, - ); + const newTokensMap = [ + ...(allTokens[interactingChainId ?? this.#chainId]?.[ + this.#getSelectedAccount().address + ] || []), + ...tokensToImport, + ].reduce((output, token) => { + output[token.address] = token; + return output; + }, {} as { [address: string]: Token }); try { tokensToImport.forEach((tokenToAdd) => { const { address, symbol, decimals, image, aggregators, name } = @@ -495,14 +537,6 @@ export class TokensController extends BaseController< (tokenAddress) => !newTokensMap[tokenAddress.toLowerCase()], ); - let interactingChainId; - if (networkClientId) { - interactingChainId = this.messagingSystem.call( - 'NetworkController:getNetworkClientById', - networkClientId, - ).configuration.chainId; - } - const detectedTokensForGivenChain = interactingChainId ? allDetectedTokens?.[interactingChainId]?.[this.#getSelectedAddress()] : []; @@ -536,12 +570,24 @@ export class TokensController extends BaseController< * Ignore a batch of tokens. * * @param tokenAddressesToIgnore - Array of token addresses to ignore. + * @param networkClientId - Optional network client ID used to determine interacting chain ID. 
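+ * @example
+ * For example, to ignore a token on the chain backing the 'sepolia' network client
+ * (the token address is hypothetical):
+ * controller.ignoreTokens(['0x0000000000000000000000000000000000000001'], 'sepolia');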
*/ - ignoreTokens(tokenAddressesToIgnore: string[]) { + ignoreTokens( + tokenAddressesToIgnore: string[], + networkClientId?: NetworkClientId, + ) { const { ignoredTokens, detectedTokens, tokens } = this.state; const ignoredTokensMap: { [key: string]: true } = {}; let newIgnoredTokens: string[] = [...ignoredTokens]; + let interactingChainId; + if (networkClientId) { + interactingChainId = this.messagingSystem.call( + 'NetworkController:getNetworkClientById', + networkClientId, + ).configuration.chainId; + } + const checksummedTokenAddresses = tokenAddressesToIgnore.map((address) => { const checksumAddress = toChecksumHexAddress(address); ignoredTokensMap[address.toLowerCase()] = true; @@ -560,6 +606,7 @@ export class TokensController extends BaseController< newIgnoredTokens, newDetectedTokens, newTokens, + interactingChainId, }); this.update((state) => { diff --git a/packages/assets-controllers/src/assetsUtil.test.ts b/packages/assets-controllers/src/assetsUtil.test.ts index 5a52a98254..f32d2b3ae8 100644 --- a/packages/assets-controllers/src/assetsUtil.test.ts +++ b/packages/assets-controllers/src/assetsUtil.test.ts @@ -551,6 +551,35 @@ describe('assetsUtil', () => { expect(Object.keys(timestampsByIndex)).toHaveLength(3); expect(timestampsIncreasing).toBe(true); }); + + it('works when the result is an array', async () => { + const results = await assetsUtil.reduceInBatchesSerially< + string, + string[] + >({ + values: ['a', 'b', 'c', 'd', 'e', 'f'], + batchSize: 2, + eachBatch: async (workingResult, batch) => { + return [...workingResult, ...batch.map((s) => s.toUpperCase())]; + }, + initialResult: [], + }); + + expect(results).toStrictEqual(['A', 'B', 'C', 'D', 'E', 'F']); + }); + + it('works when the result is a number', async () => { + const results = await assetsUtil.reduceInBatchesSerially({ + values: [1, 2, 3, 4, 5], + batchSize: 2, + eachBatch: async (workingResult, batch) => { + return workingResult + batch.reduce((a, b) => a + b, 0); + }, + initialResult: 0, + }); + + expect(results).toBe(15); + }); }); describe('fetchAndMapExchangeRates', () => { diff --git a/packages/assets-controllers/src/assetsUtil.ts b/packages/assets-controllers/src/assetsUtil.ts index 5392a2419d..4cd2aef222 100644 --- a/packages/assets-controllers/src/assetsUtil.ts +++ b/packages/assets-controllers/src/assetsUtil.ts @@ -331,7 +331,7 @@ export function divideIntoBatches( } /** - * Constructs an object from processing batches of the given values + * Constructs a result from processing batches of the given values * sequentially. * * @param args - The arguments to this function. @@ -343,12 +343,9 @@ export function divideIntoBatches( * and the index, and should return an updated version of the object. * @param args.initialResult - The initial value of the final data structure, * i.e., the value that will be fed into the first call of `eachBatch`. - * @returns The built object. + * @returns The built result. 
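+ * @example
+ * For example, summing numbers two at a time (mirroring the unit test added above):
+ * await reduceInBatchesSerially({
+ *   values: [1, 2, 3, 4, 5],
+ *   batchSize: 2,
+ *   eachBatch: async (total, batch) => total + batch.reduce((a, b) => a + b, 0),
+ *   initialResult: 0,
+ * }); // resolves to 15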
*/ -export async function reduceInBatchesSerially< - Value, - Result extends Record, ->({ +export async function reduceInBatchesSerially({ values, batchSize, eachBatch, diff --git a/packages/assets-controllers/src/index.ts b/packages/assets-controllers/src/index.ts index 664642bbad..aaba11c846 100644 --- a/packages/assets-controllers/src/index.ts +++ b/packages/assets-controllers/src/index.ts @@ -134,6 +134,7 @@ export { export { CodefiTokenPricesServiceV2, SUPPORTED_CHAIN_IDS, + getNativeTokenAddress, } from './token-prices-service'; export { RatesController, Cryptocurrency } from './RatesController'; export type { diff --git a/packages/assets-controllers/src/multicall.test.ts b/packages/assets-controllers/src/multicall.test.ts new file mode 100644 index 0000000000..e8692ac5c3 --- /dev/null +++ b/packages/assets-controllers/src/multicall.test.ts @@ -0,0 +1,125 @@ +import { defaultAbiCoder } from '@ethersproject/abi'; +import { Contract } from '@ethersproject/contracts'; +import { Web3Provider } from '@ethersproject/providers'; +import { abiERC20 } from '@metamask/metamask-eth-abis'; + +import { multicallOrFallback } from './multicall'; + +const provider = new Web3Provider(jest.fn()); + +describe('multicall', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should return empty results for empty calls', async () => { + const results = await multicallOrFallback([], '0x1', provider); + expect(results).toStrictEqual([]); + }); + + describe('when calls are non empty', () => { + // Mock mutiple calls + const call = (accountAddress: string, tokenAddress: string) => ({ + contract: new Contract(tokenAddress, abiERC20, provider), + functionSignature: 'balanceOf(address)', + arguments: [accountAddress], + }); + + const calls = [ + call( + '0x0000000000000000000000000000000000000000', + '0x0000000000000000000000000000000000000001', + ), + call( + '0x0000000000000000000000000000000000000002', + '0x0000000000000000000000000000000000000003', + ), + ]; + + it('should return results via multicall on supported chains', async () => { + // Mock return value for the single multicall + jest + .spyOn(provider, 'call') + .mockResolvedValue( + defaultAbiCoder.encode( + ['tuple(bool,bytes)[]'], + [ + calls.map((_, i) => [ + true, + defaultAbiCoder.encode(['uint256'], [i + 1]), + ]), + ], + ), + ); + + const results = await multicallOrFallback(calls, '0x1', provider); + expect(results).toMatchObject([ + { + success: true, + // eslint-disable-next-line @typescript-eslint/naming-convention + value: { _hex: '0x01' }, + }, + { + success: true, + // eslint-disable-next-line @typescript-eslint/naming-convention + value: { _hex: '0x02' }, + }, + ]); + }); + + it('should handle the multicall contract returning false for success', async () => { + // Mock an unsuccessful multicall + jest + .spyOn(provider, 'call') + .mockResolvedValue( + defaultAbiCoder.encode( + ['tuple(bool,bytes)[]'], + [ + calls.map((_, i) => [ + false, + defaultAbiCoder.encode(['uint256'], [i + 1]), + ]), + ], + ), + ); + + const results = await multicallOrFallback(calls, '0x1', provider); + expect(results).toMatchObject([ + { + success: false, + value: undefined, + }, + { + success: false, + value: undefined, + }, + ]); + }); + + it('should fallback to parallel calls on unsupported chains', async () => { + // Mock return values for each call + let timesCalled = 0; + jest + .spyOn(provider, 'call') + .mockImplementation(() => + Promise.resolve( + defaultAbiCoder.encode(['uint256'], [(timesCalled += 1)]), + ), + ); + + const results = 
await multicallOrFallback(calls, '0x123456789', provider); + expect(results).toMatchObject([ + { + success: true, + // eslint-disable-next-line @typescript-eslint/naming-convention + value: { _hex: '0x01' }, + }, + { + success: true, + // eslint-disable-next-line @typescript-eslint/naming-convention + value: { _hex: '0x02' }, + }, + ]); + }); + }); +}); diff --git a/packages/assets-controllers/src/multicall.ts b/packages/assets-controllers/src/multicall.ts new file mode 100644 index 0000000000..e5be69f09d --- /dev/null +++ b/packages/assets-controllers/src/multicall.ts @@ -0,0 +1,398 @@ +import { Contract } from '@ethersproject/contracts'; +import type { Web3Provider } from '@ethersproject/providers'; +import type { Hex } from '@metamask/utils'; + +import { reduceInBatchesSerially } from './assetsUtil'; + +// https://github.com/mds1/multicall/blob/main/deployments.json +const MULTICALL_CONTRACT_BY_CHAINID = { + '0x1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x5': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xaa36a7': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4268': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x5e9': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1b6e6': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x18fc4a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x45': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1a4': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xaa37dc': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa4b1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa4ba': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x66eed': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x66eee': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x66eeb': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x15f2249': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x89': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x13881': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x13882': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x44d': '0xca11bde05977b3631167028862be2a173976ca11', + '0x5a2': '0xca11bde05977b3631167028862be2a173976ca11', + '0x98a': '0xca11bde05977b3631167028862be2a173976ca11', + '0x64': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x27d8': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa86a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa869': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xfa2': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xfa': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xfaf0': '0xca11bde05977b3631167028862be2a173976ca11', + '0x38': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x61': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x15eb': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xcc': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x504': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x505': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x507': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2a15c308d': '0xca11bde05977b3631167028862be2a173976ca11', + '0x2a15c3083': '0xca11bde05977b3631167028862be2a173976ca11', + '0x63564c40': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x19': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x152': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x5535072': 
'0xcA11bde05977b3631167028862bE2a173976CA11', + '0x6c1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x7a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xe': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x13': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x10': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x72': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x120': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4e454152': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x250': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x5c2359': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xec0': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x42': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x80': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x440': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x257': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xe9fe': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xd3a0': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x84444': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1e': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1f': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2329': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2328': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x6c': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x12': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa516': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x5afe': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa4ec': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xaef3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x116ea': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x116e9': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2019': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3e9': '0xca11bde05977b3631167028862be2a173976ca11', + '0x7d1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x141': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x6a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x28': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4d2': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1e14': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1e15': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1251': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x7f08': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x8ae': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x138b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1389': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1388': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1f92': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x14a33': '0xca11bde05977b3631167028862be2a173976ca11', + '0x14a34': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2105': '0xca11bde05977b3631167028862be2a173976ca11', + '0x936': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xff': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x46a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x46b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x8a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x14f': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xd2af': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xe9ac0ce': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xe705': '0xca11bde05977b3631167028862be2a173976ca11', + '0xe704': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xe708': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2b6f': '0xcA11bde05977b3631167028862bE2a173976CA11', + 
'0x39': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x23a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1644': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xdea8': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3af': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x171': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3e7': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x76adf1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3b9ac9ff': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2c': '0xca11bde05977b3631167028862be2a173976ca11', + '0x2e': '0xca11bde05977b3631167028862be2a173976ca11', + '0x15b3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x82751': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x8274f': '0xca11bde05977b3631167028862be2a173976ca11', + '0x82750': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x96f': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3cc5': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4571': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xe99': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x7d0': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1297': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1d5e': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3a14269b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x561bf78b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x235ddd0': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3cd156dc': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x5d456c62': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x79f99296': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x585eb4b1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x507aaa2a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1fc3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x32d': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x8a73': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x8a72': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x8a71': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xe9ac0d6': '0xca11bde05977b3631167028862be2a173976ca11', + '0x1069': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x7e5': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x53': '0xca11bde05977b3631167028862be2a173976ca11', + '0x52': '0xca11bde05977b3631167028862be2a173976ca11', + '0xe298': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1a8': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x94': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2c6': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2803': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2802': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa9': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x28c5f': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x28c60': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x13a': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4cb2f': '0xdbfa261cd7d17bb40479a0493ad6c0fee435859e', + '0x7f93': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xb660': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xb02113d3f': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xdad': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xdae': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x15b38': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x15b32': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x45c': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x45b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3d': 
'0xcA11bde05977b3631167028862bE2a173976CA11', + '0x41a6ace': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa729': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1f47b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1b59': '0xca11bde05977b3631167028862be2a173976ca11', + '0x1b58': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xc3': '0xca11bde05977b3631167028862be2a173976ca11', + '0x16fd8': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xc7': '0xca11bde05977b3631167028862be2a173976ca11', + '0x405': '0xca11bde05977b3631167028862be2a173976ca11', + '0x334': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1ce': '0xca11bde05977b3631167028862be2a173976ca11', + '0x1cf': '0xca11bde05977b3631167028862be2a173976ca11', + '0xa70e': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x868b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa0c71fd': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x13e31': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa1337': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1f2b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xf63': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x144': '0xF9cda624FBC7e059355ce98a31693d299FACd963', + '0x118': '0xF9cda624FBC7e059355ce98a31693d299FACd963', + '0x12c': '0xF9cda624FBC7e059355ce98a31693d299FACd963', + '0x18995f': '0xF9cda624FBC7e059355ce98a31693d299FACd963', + '0x2b74': '0xF9cda624FBC7e059355ce98a31693d299FACd963', + '0xfc': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x9da': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x137': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x13ed': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x24b1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xba9302': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x7c8': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x138d5': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x6d': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x343b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x34a1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x3109': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x91b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa96': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x22c3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2be3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xbf03': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1b254': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa7b14': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2276': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1b9e': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x6a63bb8': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x15af3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x15af1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xae3f3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x531': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x28c61': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x28c58': '0xca11bde05977b3631167028862be2a173976ca11', + '0x1d88': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x5b9b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4c7e1': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xa53b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1a2b': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x406': '0xca11bde05977b3631167028862be2a173976ca11', + '0x2cef': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x18b2': '0xca11bde05977b3631167028862be2a173976ca11', + '0x182a9': 
'0xcA11bde05977b3631167028862bE2a173976CA11', + '0xc4': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xfdd': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xfde': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x99c0a0f': '0xca11bde05977b3631167028862be2a173976ca11', + '0x22cf': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x310c5': '0xca11bde05977b3631167028862be2a173976ca11', + '0x46f': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x659': '0xca11bde05977b3631167028862be2a173976ca11', + '0x139c968f9': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xed88': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0xd036': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1f3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x31bf8c3': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x1cbc67bfdc': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x98967f': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x4f588': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x16db': '0xca11bde05977b3631167028862be2a173976ca11', + '0x3a': '0xca11bde05977b3631167028862be2a173976ca11', + '0x59': '0xca11bde05977b3631167028862be2a173976ca11', + '0x1e0': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x2eb': '0xcA11bde05977b3631167028862bE2a173976CA11', + '0x221': '0xcA11bde05977b3631167028862bE2a173976CA11', +} as Record; + +const multicallAbi = [ + { + name: 'tryAggregate', + type: 'function', + stateMutability: 'payable', + inputs: [ + { name: 'requireSuccess', type: 'bool' }, + { + name: 'calls', + type: 'tuple[]', + components: [ + { name: 'target', type: 'address' }, + { name: 'callData', type: 'bytes' }, + ], + }, + ], + outputs: [ + { + name: 'returnData', + type: 'tuple[]', + components: [ + { name: 'success', type: 'bool' }, + { name: 'returnData', type: 'bytes' }, + ], + }, + ], + }, +]; + +type Call = { + contract: Contract; + functionSignature: string; + arguments: unknown[]; +}; + +export type MulticallResult = { success: boolean; value: unknown }; + +const multicall = async ( + calls: Call[], + multicallAddress: Hex, + provider: Web3Provider, + maxCallsPerMulticall: number, +): Promise => { + const multicallContract = new Contract( + multicallAddress, + multicallAbi, + provider, + ); + + return await reduceInBatchesSerially({ + values: calls, + batchSize: maxCallsPerMulticall, + initialResult: [], + eachBatch: async (workingResult, batch) => { + const calldata = batch.map((call) => ({ + target: call.contract.address, + callData: call.contract.interface.encodeFunctionData( + call.contract.interface.functions[call.functionSignature], + call.arguments, + ), + })); + + const results = await multicallContract.callStatic.tryAggregate( + false, + calldata, + ); + + return [ + ...workingResult, + ...results.map( + (r: { success: boolean; returnData: string }, i: number) => ({ + success: r.success, + value: r.success + ? batch[i].contract.interface.decodeFunctionResult( + batch[i].functionSignature, + r.returnData, + )[0] + : undefined, + }), + ), + ]; + }, + }); +}; + +const fallback = async ( + calls: Call[], + maxCallsParallel: number, +): Promise => { + return await reduceInBatchesSerially({ + values: calls, + batchSize: maxCallsParallel, + initialResult: [], + eachBatch: async (workingResult, batch) => { + const results = await Promise.allSettled( + batch.map((call) => + call.contract[call.functionSignature](...call.arguments), + ), + ); + return [ + ...workingResult, + ...results.map((p) => ({ + success: p.status === 'fulfilled', + value: p.status === 'fulfilled' ? 
p.value : undefined, + })), + ]; + }, + }); +}; + +/** + * Executes an array of contract calls. If the chain supports multicalls, + * the calls will be executed in single RPC requests (up to maxCallsPerMulticall). + * Otherwise the calls will be executed separately in parallel (up to maxCallsParallel). + * @param calls - An array of contract calls to execute. + * @param chainId - The hexadecimal chain id. + * @param provider - An ethers rpc provider. + * @param maxCallsPerMulticall - If multicall is supported, the maximum number of calls to execute in each multicall. + * @param maxCallsParallel - If multicall is not supported, the maximum number of calls to execute in parallel. + * @returns An array of results, with a success boolean and value for each call. + */ +export const multicallOrFallback = async ( + calls: Call[], + chainId: Hex, + provider: Web3Provider, + maxCallsPerMulticall = 300, + maxCallsParallel = 20, +): Promise => { + if (calls.length === 0) { + return []; + } + + const multicallAddress = MULTICALL_CONTRACT_BY_CHAINID[chainId]; + return await (multicallAddress + ? multicall(calls, multicallAddress, provider, maxCallsPerMulticall) + : fallback(calls, maxCallsParallel)); +}; diff --git a/packages/assets-controllers/src/token-prices-service/codefi-v2.test.ts b/packages/assets-controllers/src/token-prices-service/codefi-v2.test.ts index 2ebaa0d219..e1efe858ec 100644 --- a/packages/assets-controllers/src/token-prices-service/codefi-v2.test.ts +++ b/packages/assets-controllers/src/token-prices-service/codefi-v2.test.ts @@ -5,6 +5,8 @@ import { CodefiTokenPricesServiceV2, SUPPORTED_CHAIN_IDS, SUPPORTED_CURRENCIES, + ZERO_ADDRESS, + getNativeTokenAddress, } from './codefi-v2'; // We're not customizing the default max delay @@ -208,6 +210,51 @@ describe('CodefiTokenPricesServiceV2', () => { }); }); + it('calls the /spot-prices endpoint using the correct native token address', async () => { + const mockPriceAPI = nock('https://price.api.cx.metamask.io') + .get('/v2/chains/137/spot-prices') + .query({ + tokenAddresses: '0x0000000000000000000000000000000000001010', + vsCurrency: 'ETH', + includeMarketData: 'true', + }) + .reply(200, { + '0x0000000000000000000000000000000000001010': { + price: 14, + currency: 'ETH', + pricePercentChange1d: 1, + priceChange1d: 1, + marketCap: 117219.99428314982, + allTimeHigh: 0.00060467892389492, + allTimeLow: 0.00002303954000865728, + totalVolume: 5155.094053542448, + high1d: 0.00008020715848194385, + low1d: 0.00007792083564549064, + circulatingSupply: 1494269733.9526057, + dilutedMarketCap: 117669.5125951733, + marketCapPercentChange1d: 0.76671, + pricePercentChange1h: -1.0736342953259423, + pricePercentChange7d: -7.351582573655089, + pricePercentChange14d: -1.0799098946709822, + pricePercentChange30d: -25.776321124365992, + pricePercentChange200d: 46.091571238599165, + pricePercentChange1y: -2.2992517267242754, + }, + }); + + const marketData = + await new CodefiTokenPricesServiceV2().fetchTokenPrices({ + chainId: '0x89', + tokenAddresses: [], + currency: 'ETH', + }); + + expect(mockPriceAPI.isDone()).toBe(true); + expect( + marketData['0x0000000000000000000000000000000000001010'], + ).toBeDefined(); + }); + it('should not include token price object for token address when token price in not included the response data', () => { nock('https://price.api.cx.metamask.io') .get('/v2/chains/1/spot-prices') @@ -1960,6 +2007,19 @@ describe('CodefiTokenPricesServiceV2', () => { ).toBe(false); }); }); + + describe('getNativeTokenAddress', () => { +
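To illustrate the `multicallOrFallback` helper added above, here is a minimal usage sketch (not part of the diff). The ERC-20 ABI fragment, the token addresses, and the relative `./multicall` import path are assumptions made for illustration; the chain ID used ('0x2105', Base) does appear in the `MULTICALL_CONTRACT_BY_CHAINID` map above, so these calls would be batched through `tryAggregate`.

```typescript
import { Contract } from '@ethersproject/contracts';
import type { Web3Provider } from '@ethersproject/providers';
import type { Hex } from '@metamask/utils';

import { multicallOrFallback } from './multicall';

// Minimal ERC-20 fragment; only balanceOf is needed for this sketch.
const erc20Abi = ['function balanceOf(address owner) view returns (uint256)'];

/**
 * Reads the balance of `account` for each token address. On a chain listed in
 * MULTICALL_CONTRACT_BY_CHAINID (e.g. Base, '0x2105') the calls are batched
 * into tryAggregate requests; on other chains they fall back to parallel
 * eth_call requests via Promise.allSettled.
 */
async function fetchBalances(
  provider: Web3Provider,
  chainId: Hex,
  tokenAddresses: string[],
  account: string,
) {
  const calls = tokenAddresses.map((tokenAddress) => ({
    contract: new Contract(tokenAddress, erc20Abi, provider),
    // Keyed by canonical signature, matching contract.interface.functions
    functionSignature: 'balanceOf(address)',
    arguments: [account],
  }));

  const results = await multicallOrFallback(calls, chainId, provider);

  // Each entry carries its own success flag; failed calls yield `undefined`.
  return results.map((result) => (result.success ? result.value : undefined));
}
```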
it('should return unique native token address for MATIC', () => { + expect(getNativeTokenAddress('0x89')).toBe( + '0x0000000000000000000000000000000000001010', + ); + }); + it('should return zero address for other chains', () => { + (['0x1', '0x2', '0x1337'] as const).forEach((chainId) => { + expect(getNativeTokenAddress(chainId)).toBe(ZERO_ADDRESS); + }); + }); + }); }); /** diff --git a/packages/assets-controllers/src/token-prices-service/codefi-v2.ts b/packages/assets-controllers/src/token-prices-service/codefi-v2.ts index b496faa2de..4f163203e4 100644 --- a/packages/assets-controllers/src/token-prices-service/codefi-v2.ts +++ b/packages/assets-controllers/src/token-prices-service/codefi-v2.ts @@ -156,6 +156,24 @@ export const SUPPORTED_CURRENCIES = [ export const ZERO_ADDRESS: Hex = '0x0000000000000000000000000000000000000000' as const; +/** + * A mapping from chain id to the address of the chain's native token. + * Only for chains whose native tokens have a specific address. + */ +const chainIdToNativeTokenAddress: Record = { + '0x89': '0x0000000000000000000000000000000000001010', +}; + +/** + * Returns the address that should be used to query the price api for the + * chain's native token. On most chains, this is signified by the zero address. + * But on some chains, the native token has a specific address. + * @param chainId - The hexadecimal chain id. + * @returns The address of the chain's native token. + */ +export const getNativeTokenAddress = (chainId: Hex): Hex => + chainIdToNativeTokenAddress[chainId] ?? ZERO_ADDRESS; + /** * A currency that can be supplied as the `vsCurrency` parameter to * the `/spot-prices` endpoint. Covers both uppercase and lowercase versions. @@ -435,7 +453,7 @@ export class CodefiTokenPricesServiceV2 const url = new URL(`${BASE_URL}/chains/${chainIdAsNumber}/spot-prices`); url.searchParams.append( 'tokenAddresses', - [ZERO_ADDRESS, ...tokenAddresses].join(','), + [getNativeTokenAddress(chainId), ...tokenAddresses].join(','), ); url.searchParams.append('vsCurrency', currency); url.searchParams.append('includeMarketData', 'true'); @@ -445,7 +463,7 @@ export class CodefiTokenPricesServiceV2 handleFetch(url, { headers: { 'Cache-Control': 'no-cache' } }), ); - return [ZERO_ADDRESS, ...tokenAddresses].reduce( + return [getNativeTokenAddress(chainId), ...tokenAddresses].reduce( ( obj: Partial>, tokenAddress, diff --git a/packages/assets-controllers/src/token-prices-service/index.test.ts b/packages/assets-controllers/src/token-prices-service/index.test.ts index a5a1e93f7a..a59be2ba4d 100644 --- a/packages/assets-controllers/src/token-prices-service/index.test.ts +++ b/packages/assets-controllers/src/token-prices-service/index.test.ts @@ -6,6 +6,7 @@ describe('token-prices-service', () => { Array [ "CodefiTokenPricesServiceV2", "SUPPORTED_CHAIN_IDS", + "getNativeTokenAddress", ] `); }); diff --git a/packages/assets-controllers/src/token-prices-service/index.ts b/packages/assets-controllers/src/token-prices-service/index.ts index f6313c36e7..509fc68005 100644 --- a/packages/assets-controllers/src/token-prices-service/index.ts +++ b/packages/assets-controllers/src/token-prices-service/index.ts @@ -1,2 +1,6 @@ export type { AbstractTokenPricesService } from './abstract-token-prices-service'; -export { CodefiTokenPricesServiceV2, SUPPORTED_CHAIN_IDS } from './codefi-v2'; +export { + CodefiTokenPricesServiceV2, + SUPPORTED_CHAIN_IDS, + getNativeTokenAddress, +} from './codefi-v2'; diff --git a/packages/chain-controller/CHANGELOG.md 
b/packages/chain-controller/CHANGELOG.md index 1fb577685e..e32c566071 100644 --- a/packages/chain-controller/CHANGELOG.md +++ b/packages/chain-controller/CHANGELOG.md @@ -7,6 +7,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.2.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-api` from `^8.1.3` to `^10.1.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) + - If you are depending on `@metamask/providers` directly, you will need to upgrade to 18.1.0. +- Bump `@metamask/snaps-utils` from `^4.3.6` to `^8.3.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) +- Bump `@metamask/snaps-sdk` from `^6.5.0` to `^6.7.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) +- Bump `@metamask/snaps-controllers` from `^9.7.0`to `^9.10.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) +- Bump `@metamask/utils` from `^9.1.0` to `^10.0.0` ([#4831](https://github.com/MetaMask/core/pull/4831)) + ## [0.1.3] ### Changed @@ -57,7 +68,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Initial release -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/chain-controller@0.1.3...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/chain-controller@0.2.0...HEAD +[0.2.0]: https://github.com/MetaMask/core/compare/@metamask/chain-controller@0.1.3...@metamask/chain-controller@0.2.0 [0.1.3]: https://github.com/MetaMask/core/compare/@metamask/chain-controller@0.1.2...@metamask/chain-controller@0.1.3 [0.1.2]: https://github.com/MetaMask/core/compare/@metamask/chain-controller@0.1.1...@metamask/chain-controller@0.1.2 [0.1.1]: https://github.com/MetaMask/core/compare/@metamask/chain-controller@0.1.0...@metamask/chain-controller@0.1.1 diff --git a/packages/chain-controller/package.json b/packages/chain-controller/package.json index e6fdbe5731..22b35dc4e9 100644 --- a/packages/chain-controller/package.json +++ b/packages/chain-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/chain-controller", - "version": "0.1.3", + "version": "0.2.0", "description": "Manages chain-agnostic providers", "keywords": [ "MetaMask", @@ -49,10 +49,10 @@ "dependencies": { "@metamask/base-controller": "^7.0.2", "@metamask/chain-api": "^0.1.0", - "@metamask/keyring-api": "^8.1.3", - "@metamask/snaps-controllers": "^9.7.0", - "@metamask/snaps-sdk": "^6.5.0", - "@metamask/snaps-utils": "^8.1.1", + "@metamask/keyring-api": "^10.1.0", + "@metamask/snaps-controllers": "^9.10.0", + "@metamask/snaps-sdk": "^6.7.0", + "@metamask/snaps-utils": "^8.3.0", "@metamask/utils": "^10.0.0", "uuid": "^8.3.2" }, diff --git a/packages/keyring-controller/CHANGELOG.md b/packages/keyring-controller/CHANGELOG.md index ee4831644e..a6e08923d1 100644 --- a/packages/keyring-controller/CHANGELOG.md +++ b/packages/keyring-controller/CHANGELOG.md @@ -7,6 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [19.0.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-api` from `^8.1.3` to `^10.1.0` ([#4948](https://github.com/MetaMask/core/pull/4948)) + - If you are depending on `@metamask/providers` directly, you will need to upgrade to 18.1.0. + ## [18.0.0] ### Removed @@ -581,7 +588,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 All changes listed after this point were applied to this package following the monorepo conversion. 
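Stepping back to the token-prices-service change above: `getNativeTokenAddress` lets price consumers ask which address represents a chain's native asset on the Price API. A minimal sketch, not part of the diff; the relative import path mirrors the test file, and since only the Polygon mapping is defined, every other chain resolves to the zero address.

```typescript
import { getNativeTokenAddress, ZERO_ADDRESS } from './codefi-v2';

// Polygon ('0x89') prices its native token under the MATIC precompile
// address rather than the zero address.
const polygonNativeAddress = getNativeTokenAddress('0x89');
// => '0x0000000000000000000000000000000000001010'

// Chains without an entry in chainIdToNativeTokenAddress keep the previous
// behaviour and fall back to the zero address.
const mainnetNativeAddress = getNativeTokenAddress('0x1');
console.log(polygonNativeAddress, mainnetNativeAddress === ZERO_ADDRESS);
```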
-[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/keyring-controller@18.0.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/keyring-controller@19.0.0...HEAD +[19.0.0]: https://github.com/MetaMask/core/compare/@metamask/keyring-controller@18.0.0...@metamask/keyring-controller@19.0.0 [18.0.0]: https://github.com/MetaMask/core/compare/@metamask/keyring-controller@17.3.1...@metamask/keyring-controller@18.0.0 [17.3.1]: https://github.com/MetaMask/core/compare/@metamask/keyring-controller@17.3.0...@metamask/keyring-controller@17.3.1 [17.3.0]: https://github.com/MetaMask/core/compare/@metamask/keyring-controller@17.2.2...@metamask/keyring-controller@17.3.0 diff --git a/packages/keyring-controller/package.json b/packages/keyring-controller/package.json index 6bcca26f1e..f8ebe7a1b2 100644 --- a/packages/keyring-controller/package.json +++ b/packages/keyring-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/keyring-controller", - "version": "18.0.0", + "version": "19.0.0", "description": "Stores identities seen in the wallet and manages interactions such as signing", "keywords": [ "MetaMask", @@ -54,7 +54,7 @@ "@metamask/eth-hd-keyring": "^7.0.4", "@metamask/eth-sig-util": "^8.0.0", "@metamask/eth-simple-keyring": "^6.0.5", - "@metamask/keyring-api": "^8.1.3", + "@metamask/keyring-api": "^10.1.0", "@metamask/message-manager": "^11.0.1", "@metamask/utils": "^10.0.0", "async-mutex": "^0.5.0", diff --git a/packages/multichain/CHANGELOG.md b/packages/multichain/CHANGELOG.md index b518709c7b..bbe1561ee9 100644 --- a/packages/multichain/CHANGELOG.md +++ b/packages/multichain/CHANGELOG.md @@ -7,4 +7,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -[Unreleased]: https://github.com/MetaMask/core/ +## [1.0.0] + +### Added + +- Initial release ([#4962](https://github.com/MetaMask/core/pull/4962)) + +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/multichain@1.0.0...HEAD +[1.0.0]: https://github.com/MetaMask/core/releases/tag/@metamask/multichain@1.0.0 diff --git a/packages/multichain/package.json b/packages/multichain/package.json index 4fa4f7ccfc..c23a839a8b 100644 --- a/packages/multichain/package.json +++ b/packages/multichain/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/multichain", - "version": "0.0.0", + "version": "1.0.0", "description": "Provides types, helpers, adapters, and wrappers for facilitating CAIP Multichain sessions", "keywords": [ "MetaMask", @@ -46,8 +46,18 @@ "test:verbose": "NODE_OPTIONS=--experimental-vm-modules jest --verbose", "test:watch": "NODE_OPTIONS=--experimental-vm-modules jest --watch" }, + "dependencies": { + "@metamask/api-specs": "^0.10.12", + "@metamask/controller-utils": "^11.4.3", + "@metamask/eth-json-rpc-filters": "^7.0.0", + "@metamask/rpc-errors": "^7.0.1", + "@metamask/utils": "^10.0.0", + "lodash": "^4.17.21" + }, "devDependencies": { "@metamask/auto-changelog": "^3.4.4", + "@metamask/network-controller": "^22.0.2", + "@metamask/permission-controller": "^11.0.3", "@types/jest": "^27.4.1", "deepmerge": "^4.2.2", "jest": "^27.5.1", @@ -56,6 +66,10 @@ "typedoc-plugin-missing-exports": "^2.0.0", "typescript": "~5.2.2" }, + "peerDependencies": { + "@metamask/network-controller": "^22.0.0", + "@metamask/permission-controller": "^11.0.0" + }, "engines": { "node": "^18.18 || >=20" }, diff --git a/packages/multichain/src/adapters/caip-permission-adapter-eth-accounts.test.ts 
b/packages/multichain/src/adapters/caip-permission-adapter-eth-accounts.test.ts new file mode 100644 index 0000000000..e6ae16f968 --- /dev/null +++ b/packages/multichain/src/adapters/caip-permission-adapter-eth-accounts.test.ts @@ -0,0 +1,209 @@ +import type { Caip25CaveatValue } from '../caip25Permission'; +import { + getEthAccounts, + setEthAccounts, +} from './caip-permission-adapter-eth-accounts'; + +describe('CAIP-25 eth_accounts adapters', () => { + describe('getEthAccounts', () => { + it('returns an empty array if the required scopes are empty', () => { + const ethAccounts = getEthAccounts({ + requiredScopes: {}, + optionalScopes: {}, + }); + expect(ethAccounts).toStrictEqual([]); + }); + it('returns an empty array if the scope objects have no accounts', () => { + const ethAccounts = getEthAccounts({ + requiredScopes: { + 'eip155:1': { accounts: [] }, + 'eip155:2': { accounts: [] }, + }, + optionalScopes: {}, + }); + expect(ethAccounts).toStrictEqual([]); + }); + it('returns an empty array if the scope objects have no eth accounts', () => { + const ethAccounts = getEthAccounts({ + requiredScopes: { + 'bip122:000000000019d6689c085ae165831e93': { + accounts: [ + 'bip122:000000000019d6689c085ae165831e93:128Lkh3S7CkDTBZ8W7BbpsN3YYizJMp8p6', + ], + }, + }, + optionalScopes: {}, + }); + expect(ethAccounts).toStrictEqual([]); + }); + + it('returns the unique set of EIP155 accounts from the CAIP-25 caveat value', () => { + const ethAccounts = getEthAccounts({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + 'eip155:5': { + accounts: ['eip155:5:0x2', 'eip155:1:0x3'], + }, + 'bip122:000000000019d6689c085ae165831e93': { + accounts: [ + 'bip122:000000000019d6689c085ae165831e93:128Lkh3S7CkDTBZ8W7BbpsN3YYizJMp8p6', + ], + }, + }, + optionalScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x4'], + }, + 'eip155:10': { + accounts: [], + }, + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + 'wallet:eip155': { + accounts: ['wallet:eip155:0x5'], + }, + }, + }); + + expect(ethAccounts).toStrictEqual([ + '0x1', + '0x2', + '0x3', + '0x4', + '0x100', + '0x5', + ]); + }); + }); + + describe('setEthAccounts', () => { + it('returns a CAIP-25 caveat value with all EIP-155 scopeObject.accounts set to CAIP-10 account addresses formed from the accounts param', () => { + const input: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + 'eip155:5': { + accounts: ['eip155:5:0x2', 'eip155:1:0x3'], + }, + 'bip122:000000000019d6689c085ae165831e93': { + accounts: [ + 'bip122:000000000019d6689c085ae165831e93:128Lkh3S7CkDTBZ8W7BbpsN3YYizJMp8p6', + ], + }, + }, + optionalScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x4'], + }, + 'eip155:10': { + accounts: [], + }, + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + 'wallet:eip155': { + accounts: [], + }, + wallet: { + accounts: [], + }, + }, + isMultichainOrigin: false, + }; + + const result = setEthAccounts(input, ['0x1', '0x2', '0x3']); + expect(result).toStrictEqual({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2', 'eip155:1:0x3'], + }, + 'eip155:5': { + accounts: ['eip155:5:0x1', 'eip155:5:0x2', 'eip155:5:0x3'], + }, + 'bip122:000000000019d6689c085ae165831e93': { + accounts: [ + 'bip122:000000000019d6689c085ae165831e93:128Lkh3S7CkDTBZ8W7BbpsN3YYizJMp8p6', + ], + }, + }, + optionalScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2', 'eip155:1:0x3'], + }, + 'eip155:10': { + 
accounts: ['eip155:10:0x1', 'eip155:10:0x2', 'eip155:10:0x3'], + }, + 'eip155:100': { + accounts: ['eip155:100:0x1', 'eip155:100:0x2', 'eip155:100:0x3'], + }, + 'wallet:eip155': { + accounts: [ + 'wallet:eip155:0x1', + 'wallet:eip155:0x2', + 'wallet:eip155:0x3', + ], + }, + wallet: { + accounts: [ + 'wallet:eip155:0x1', + 'wallet:eip155:0x2', + 'wallet:eip155:0x3', + ], + }, + }, + isMultichainOrigin: false, + }); + }); + + it('returns a CAIP-25 caveat value with missing "wallet:eip155" optional scope filled in, forming CAIP-10 account addresses from the accounts param', () => { + const input: Caip25CaveatValue = { + requiredScopes: {}, + optionalScopes: {}, + isMultichainOrigin: false, + }; + + const result = setEthAccounts(input, ['0x1', '0x2', '0x3']); + expect(result).toStrictEqual({ + requiredScopes: {}, + optionalScopes: { + 'wallet:eip155': { + accounts: [ + 'wallet:eip155:0x1', + 'wallet:eip155:0x2', + 'wallet:eip155:0x3', + ], + }, + }, + isMultichainOrigin: false, + }); + }); + + it('does not modify the input CAIP-25 caveat value object in place', () => { + const input: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + }, + optionalScopes: {}, + isMultichainOrigin: false, + }; + + const result = setEthAccounts(input, ['0x1', '0x2', '0x3']); + expect(input).toStrictEqual({ + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + }, + optionalScopes: {}, + isMultichainOrigin: false, + }); + expect(input).not.toStrictEqual(result); + }); + }); +}); diff --git a/packages/multichain/src/adapters/caip-permission-adapter-eth-accounts.ts b/packages/multichain/src/adapters/caip-permission-adapter-eth-accounts.ts new file mode 100644 index 0000000000..bcdcab9e80 --- /dev/null +++ b/packages/multichain/src/adapters/caip-permission-adapter-eth-accounts.ts @@ -0,0 +1,148 @@ +import { + assertIsStrictHexString, + type CaipAccountId, + type Hex, + KnownCaipNamespace, + parseCaipAccountId, +} from '@metamask/utils'; + +import type { Caip25CaveatValue } from '../caip25Permission'; +import { KnownWalletScopeString } from '../scope/constants'; +import { getUniqueArrayItems } from '../scope/transform'; +import type { InternalScopeString, InternalScopesObject } from '../scope/types'; +import { parseScopeString } from '../scope/types'; + +/** + * Checks if a scope string is either an EIP155 or wallet namespaced scope string. + * @param scopeString - The scope string to check. + * @returns True if the scope string is an EIP155 or wallet namespaced scope string, false otherwise. + */ +const isEip155ScopeString = (scopeString: InternalScopeString) => { + const { namespace } = parseScopeString(scopeString); + + return ( + namespace === KnownCaipNamespace.Eip155 || + scopeString === KnownWalletScopeString.Eip155 + ); +}; + +/** + * Gets the Ethereum (EIP155 namespaced) accounts from internal scopes. + * @param scopes - The internal scopes from which to get the Ethereum accounts. + * @returns An array of Ethereum accounts. 
+ */ +const getEthAccountsFromScopes = (scopes: InternalScopesObject) => { + const ethAccounts: Hex[] = []; + + Object.entries(scopes).forEach(([_, { accounts }]) => { + accounts?.forEach((account) => { + const { address, chainId } = parseCaipAccountId(account); + + if (isEip155ScopeString(chainId)) { + // This address should always be a valid Hex string because + // it's an EIP155/Ethereum account + assertIsStrictHexString(address); + ethAccounts.push(address); + } + }); + }); + + return ethAccounts; +}; + +/** + * Gets the Ethereum (EIP155 namespaced) accounts from the required and optional scopes. + * @param caip25CaveatValue - The CAIP-25 caveat value to get the Ethereum accounts from. + * @returns An array of Ethereum accounts. + */ +export const getEthAccounts = ( + caip25CaveatValue: Pick< + Caip25CaveatValue, + 'requiredScopes' | 'optionalScopes' + >, +): Hex[] => { + const { requiredScopes, optionalScopes } = caip25CaveatValue; + + const ethAccounts: Hex[] = [ + ...getEthAccountsFromScopes(requiredScopes), + ...getEthAccountsFromScopes(optionalScopes), + ]; + + return getUniqueArrayItems(ethAccounts); +}; + +/** + * Sets the Ethereum (EIP155 namespaced) accounts for the given scopes object. + * @param scopesObject - The scopes object to set the Ethereum accounts for. + * @param accounts - The Ethereum accounts to set. + * @returns The updated scopes object with the Ethereum accounts set. + */ +const setEthAccountsForScopesObject = ( + scopesObject: InternalScopesObject, + accounts: Hex[], +) => { + const updatedScopesObject: InternalScopesObject = {}; + Object.entries(scopesObject).forEach(([key, scopeObject]) => { + // Cast needed because index type is returned as `string` by `Object.entries` + const scopeString = key as keyof typeof scopesObject; + const isWalletNamespace = scopeString === KnownCaipNamespace.Wallet; + const { namespace, reference } = parseScopeString(scopeString); + if (!isEip155ScopeString(scopeString) && !isWalletNamespace) { + updatedScopesObject[scopeString] = scopeObject; + return; + } + + let caipAccounts: CaipAccountId[] = []; + if (isWalletNamespace) { + caipAccounts = accounts.map( + (account) => `${KnownWalletScopeString.Eip155}:${account}`, + ); + } else if (namespace && reference) { + caipAccounts = accounts.map( + (account) => `${namespace}:${reference}:${account}`, + ); + } + + updatedScopesObject[scopeString] = { + ...scopeObject, + accounts: caipAccounts, + }; + }); + + return updatedScopesObject; +}; + +/** + * Sets the Ethereum (EIP155 namespaced) accounts for the given CAIP-25 caveat value. + * We set the same accounts for all the scopes that are EIP155 or Wallet namespaced because + * we do not provide UI/UX flows for selecting different accounts across different chains. + * + * Additionally, this function adds a `wallet:eip155` scope with an empty accounts array + * to ensure that the `wallet:eip155` scope is always present in the caveat value. + * This is required because Snaps can currently have account permissions without chain permissions. + * This added `wallet:eip155` scope should be removed once Snaps are able to have/use chain permissions. + * @param caip25CaveatValue - The CAIP-25 caveat value to set the Ethereum accounts for. + * @param accounts - The Ethereum accounts to set. + * @returns The updated CAIP-25 caveat value with the Ethereum accounts set. 
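A usage sketch for the two adapters documented above, `getEthAccounts` and `setEthAccounts` (the `setEthAccounts` implementation follows immediately below). The fixture mirrors the test file earlier in this diff; importing from the `@metamask/multichain` package root is an assumption about how the package re-exports these helpers.

```typescript
import type { Caip25CaveatValue } from '@metamask/multichain';
import { getEthAccounts, setEthAccounts } from '@metamask/multichain';

const caveatValue: Caip25CaveatValue = {
  requiredScopes: {
    'eip155:1': { accounts: ['eip155:1:0x1', 'eip155:1:0x2'] },
  },
  optionalScopes: {
    'eip155:5': { accounts: ['eip155:5:0x2'] },
  },
  isMultichainOrigin: false,
};

// Flattens every EIP-155 (and wallet:eip155) account into a deduplicated
// list of hex addresses: ['0x1', '0x2'].
const ethAccounts = getEthAccounts(caveatValue);

// Overwrites the account list of every EIP-155 and wallet scope with CAIP-10
// ids built from the given addresses, adding a `wallet:eip155` optional scope
// if it was missing. The input object is not mutated.
const updated = setEthAccounts(caveatValue, ['0x1', '0x3']);
// updated.requiredScopes['eip155:1'].accounts
//   => ['eip155:1:0x1', 'eip155:1:0x3']
// updated.optionalScopes['wallet:eip155'].accounts
//   => ['wallet:eip155:0x1', 'wallet:eip155:0x3']
console.log(ethAccounts, updated);
```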
+ */ +export const setEthAccounts = ( + caip25CaveatValue: Caip25CaveatValue, + accounts: Hex[], +): Caip25CaveatValue => { + return { + ...caip25CaveatValue, + requiredScopes: setEthAccountsForScopesObject( + caip25CaveatValue.requiredScopes, + accounts, + ), + optionalScopes: setEthAccountsForScopesObject( + { + [KnownWalletScopeString.Eip155]: { + accounts: [], + }, + ...caip25CaveatValue.optionalScopes, + }, + accounts, + ), + }; +}; diff --git a/packages/multichain/src/adapters/caip-permission-adapter-permittedChains.test.ts b/packages/multichain/src/adapters/caip-permission-adapter-permittedChains.test.ts new file mode 100644 index 0000000000..bc9b0ccd7c --- /dev/null +++ b/packages/multichain/src/adapters/caip-permission-adapter-permittedChains.test.ts @@ -0,0 +1,265 @@ +import type { Caip25CaveatValue } from '../caip25Permission'; +import { + addPermittedEthChainId, + getPermittedEthChainIds, + setPermittedEthChainIds, +} from './caip-permission-adapter-permittedChains'; + +describe('CAIP-25 permittedChains adapters', () => { + describe('getPermittedEthChainIds', () => { + it('returns the unique set of EIP155 chainIds in hexadecimal format from the CAIP-25 caveat value', () => { + const ethChainIds = getPermittedEthChainIds({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + 'eip155:5': { + accounts: ['eip155:5:0x2', 'eip155:1:0x3'], + }, + 'bip122:000000000019d6689c085ae165831e93': { + accounts: [ + 'bip122:000000000019d6689c085ae165831e93:128Lkh3S7CkDTBZ8W7BbpsN3YYizJMp8p6', + ], + }, + }, + optionalScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x4'], + }, + 'eip155:10': { + accounts: [], + }, + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + }, + }); + + expect(ethChainIds).toStrictEqual(['0x1', '0x5', '0xa', '0x64']); + }); + }); + + describe('addPermittedEthChainId', () => { + it('returns a version of the caveat value with a new optional scope for the chainId if it does not already exist in required or optional scopes', () => { + const result = addPermittedEthChainId( + { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: { + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + 'wallet:eip155': { + accounts: [], + }, + }, + isMultichainOrigin: false, + }, + '0x65', + ); + + expect(result).toStrictEqual({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: { + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + 'eip155:101': { + accounts: [], + }, + 'wallet:eip155': { + accounts: [], + }, + }, + isMultichainOrigin: false, + }); + }); + + it('does not modify the input CAIP-25 caveat value object', () => { + const input: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: {}, + isMultichainOrigin: false, + }; + + const result = addPermittedEthChainId(input, '0x65'); + + expect(input).toStrictEqual({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: {}, + isMultichainOrigin: false, + }); + expect(input).not.toStrictEqual(result); + }); + + it('does not add an optional scope for the chainId if already exists in the required scopes', () => { + const input: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: { + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + }, + 
isMultichainOrigin: false, + }; + const result = addPermittedEthChainId(input, '0x1'); + + expect(result).toStrictEqual(input); + }); + + it('does not add an optional scope for the chainId if already exists in the optional scopes', () => { + const input: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: { + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + }, + isMultichainOrigin: false, + }; + const result = addPermittedEthChainId(input, '0x64'); // 0x64 === 100 + + expect(result).toStrictEqual(input); + }); + }); + + describe('setPermittedEthChainIds', () => { + it('returns a CAIP-25 caveat value with EIP-155 scopes missing from the chainIds array removed', () => { + const result = setPermittedEthChainIds( + { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + 'bip122:000000000019d6689c085ae165831e93': { + accounts: [], + }, + }, + optionalScopes: { + wallet: { + accounts: [], + }, + 'eip155:1': { + accounts: [], + }, + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + }, + isMultichainOrigin: false, + }, + ['0x1'], + ); + + expect(result).toStrictEqual({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + 'bip122:000000000019d6689c085ae165831e93': { + accounts: [], + }, + }, + optionalScopes: { + wallet: { + accounts: [], + }, + 'eip155:1': { + accounts: [], + }, + }, + isMultichainOrigin: false, + }); + }); + + it('returns a CAIP-25 caveat value with optional scopes added for missing chainIds', () => { + const result = setPermittedEthChainIds( + { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: { + 'eip155:1': { + accounts: [], + }, + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + }, + isMultichainOrigin: false, + }, + ['0x1', '0x64', '0x65'], + ); + + expect(result).toStrictEqual({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: { + 'eip155:1': { + accounts: [], + }, + 'eip155:100': { + accounts: ['eip155:100:0x100'], + }, + 'eip155:101': { + accounts: [], + }, + }, + isMultichainOrigin: false, + }); + }); + + it('does not modify the input CAIP-25 caveat value object', () => { + const input: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: {}, + isMultichainOrigin: false, + }; + + const result = setPermittedEthChainIds(input, ['0x1', '0x2', '0x3']); + + expect(input).toStrictEqual({ + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: {}, + isMultichainOrigin: false, + }); + expect(input).not.toStrictEqual(result); + }); + }); +}); diff --git a/packages/multichain/src/adapters/caip-permission-adapter-permittedChains.ts b/packages/multichain/src/adapters/caip-permission-adapter-permittedChains.ts new file mode 100644 index 0000000000..f56ff36137 --- /dev/null +++ b/packages/multichain/src/adapters/caip-permission-adapter-permittedChains.ts @@ -0,0 +1,142 @@ +import { toHex } from '@metamask/controller-utils'; +import type { Hex } from '@metamask/utils'; +import { KnownCaipNamespace } from '@metamask/utils'; + +import type { Caip25CaveatValue } from '../caip25Permission'; +import { getUniqueArrayItems } from '../scope/transform'; +import type { InternalScopesObject } from '../scope/types'; +import { parseScopeString } from '../scope/types'; + +/** + 
* Gets the Ethereum (EIP155 namespaced) chainIDs from internal scopes. + * @param scopes - The internal scopes from which to get the Ethereum chainIDs. + * @returns An array of Ethereum chainIDs. + */ +const getPermittedEthChainIdsFromScopes = (scopes: InternalScopesObject) => { + const ethChainIds: Hex[] = []; + + Object.keys(scopes).forEach((scopeString) => { + const { namespace, reference } = parseScopeString(scopeString); + if (namespace === KnownCaipNamespace.Eip155 && reference) { + ethChainIds.push(toHex(reference)); + } + }); + + return ethChainIds; +}; + +/** + * Gets the Ethereum (EIP155 namespaced) chainIDs from the required and optional scopes. + * @param caip25CaveatValue - The CAIP-25 caveat value from which to get the Ethereum chainIDs. + * @returns An array of Ethereum chainIDs. + */ +export const getPermittedEthChainIds = ( + caip25CaveatValue: Pick< + Caip25CaveatValue, + 'requiredScopes' | 'optionalScopes' + >, +) => { + const { requiredScopes, optionalScopes } = caip25CaveatValue; + + const ethChainIds: Hex[] = [ + ...getPermittedEthChainIdsFromScopes(requiredScopes), + ...getPermittedEthChainIdsFromScopes(optionalScopes), + ]; + + return getUniqueArrayItems(ethChainIds); +}; + +/** + * Adds an Ethereum (EIP155 namespaced) chainID to the optional scopes if it is not already present + * in either the pre-existing required or optional scopes. + * @param caip25CaveatValue - The CAIP-25 caveat value to add the Ethereum chainID to. + * @param chainId - The Ethereum chainID to add. + * @returns The updated CAIP-25 caveat value with the added Ethereum chainID. + */ +export const addPermittedEthChainId = ( + caip25CaveatValue: Caip25CaveatValue, + chainId: Hex, +): Caip25CaveatValue => { + const scopeString = `eip155:${parseInt(chainId, 16)}`; + if ( + Object.keys(caip25CaveatValue.requiredScopes).includes(scopeString) || + Object.keys(caip25CaveatValue.optionalScopes).includes(scopeString) + ) { + return caip25CaveatValue; + } + + return { + ...caip25CaveatValue, + optionalScopes: { + ...caip25CaveatValue.optionalScopes, + [scopeString]: { + accounts: [], + }, + }, + }; +}; + +/** + * Filters the scopes object to only include: + * - Scopes without references (e.g. "wallet:") + * - EIP155 scopes for the given chainIDs + * - Non EIP155 scopes (e.g. "bip122:" or any other non ethereum namespaces) + * @param scopesObject - The scopes object to filter. + * @param chainIds - The chainIDs to filter EIP155 scopes by. + * @returns The filtered scopes object. + */ +const filterEthScopesObjectByChainId = ( + scopesObject: InternalScopesObject, + chainIds: Hex[], +): InternalScopesObject => { + const updatedScopesObject: InternalScopesObject = {}; + + Object.entries(scopesObject).forEach(([key, scopeObject]) => { + // Cast needed because index type is returned as `string` by `Object.entries` + const scopeString = key as keyof typeof scopesObject; + const { namespace, reference } = parseScopeString(scopeString); + if (!reference) { + updatedScopesObject[scopeString] = scopeObject; + return; + } + if (namespace === KnownCaipNamespace.Eip155) { + const chainId = toHex(reference); + if (chainIds.includes(chainId)) { + updatedScopesObject[scopeString] = scopeObject; + } + } else { + updatedScopesObject[scopeString] = scopeObject; + } + }); + + return updatedScopesObject; +}; + +/** + * Sets the permitted Ethereum (EIP155 namespaced) chainIDs for the required and optional scopes. + * @param caip25CaveatValue - The CAIP-25 caveat value to set the permitted Ethereum chainIDs for. 
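As with the eth_accounts adapter, a short sketch of the permittedChains adapters: `getPermittedEthChainIds` and `addPermittedEthChainId` above, and `setPermittedEthChainIds` defined just below. Fixture values mirror the tests earlier in this diff; the package-root import is again an assumption.

```typescript
import type { Caip25CaveatValue } from '@metamask/multichain';
import {
  addPermittedEthChainId,
  getPermittedEthChainIds,
  setPermittedEthChainIds,
} from '@metamask/multichain';

const caveatValue: Caip25CaveatValue = {
  requiredScopes: {
    'eip155:1': { accounts: ['eip155:1:0x1'] },
  },
  optionalScopes: {
    'eip155:100': { accounts: [] },
  },
  isMultichainOrigin: false,
};

// Collects the EIP-155 references from both scope maps as hex chain ids:
// ['0x1', '0x64'].
const chainIds = getPermittedEthChainIds(caveatValue);

// Adds 'eip155:59144' (0xe708, Linea) as an empty optional scope; chain ids
// already present in either scope map are left untouched.
const withLinea = addPermittedEthChainId(caveatValue, '0xe708');

// Drops EIP-155 scopes whose chain id is not in the list, then adds any
// missing chain ids as empty optional scopes. Non-EIP-155 scopes survive.
const exact = setPermittedEthChainIds(caveatValue, ['0x1', '0xe708']);
// exact.optionalScopes => { 'eip155:59144': { accounts: [] } }
console.log(chainIds, withLinea, exact);
```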
+ * @param chainIds - The Ethereum chainIDs to set as permitted. + * @returns The updated CAIP-25 caveat value with the permitted Ethereum chainIDs. + */ +export const setPermittedEthChainIds = ( + caip25CaveatValue: Caip25CaveatValue, + chainIds: Hex[], +): Caip25CaveatValue => { + let updatedCaveatValue: Caip25CaveatValue = { + ...caip25CaveatValue, + requiredScopes: filterEthScopesObjectByChainId( + caip25CaveatValue.requiredScopes, + chainIds, + ), + optionalScopes: filterEthScopesObjectByChainId( + caip25CaveatValue.optionalScopes, + chainIds, + ), + }; + + chainIds.forEach((chainId) => { + updatedCaveatValue = addPermittedEthChainId(updatedCaveatValue, chainId); + }); + + return updatedCaveatValue; +}; diff --git a/packages/multichain/src/caip25Permission.test.ts b/packages/multichain/src/caip25Permission.test.ts new file mode 100644 index 0000000000..b434e10603 --- /dev/null +++ b/packages/multichain/src/caip25Permission.test.ts @@ -0,0 +1,652 @@ +import { + CaveatMutatorOperation, + PermissionType, +} from '@metamask/permission-controller'; + +import type { Caip25CaveatValue } from './caip25Permission'; +import { + Caip25CaveatType, + caip25EndowmentBuilder, + Caip25EndowmentPermissionName, + Caip25CaveatMutators, + createCaip25Caveat, +} from './caip25Permission'; +import * as ScopeSupported from './scope/supported'; + +jest.mock('./scope/supported', () => ({ + ...jest.requireActual('./scope/supported'), + isSupportedScopeString: jest.fn(), +})); +const MockScopeSupported = jest.mocked(ScopeSupported); + +const { removeAccount, removeScope } = Caip25CaveatMutators[Caip25CaveatType]; + +describe('caip25EndowmentBuilder', () => { + describe('specificationBuilder', () => { + it('builds the expected permission specification', () => { + const specification = caip25EndowmentBuilder.specificationBuilder({ + methodHooks: { + findNetworkClientIdByChainId: jest.fn(), + listAccounts: jest.fn(), + }, + }); + expect(specification).toStrictEqual({ + permissionType: PermissionType.Endowment, + targetName: Caip25EndowmentPermissionName, + endowmentGetter: expect.any(Function), + allowedCaveats: [Caip25CaveatType], + validator: expect.any(Function), + }); + + expect(specification.endowmentGetter()).toBeNull(); + }); + }); + + describe('createCaip25Caveat', () => { + it('builds the caveat', () => { + expect( + createCaip25Caveat({ + requiredScopes: {}, + optionalScopes: {}, + isMultichainOrigin: true, + }), + ).toStrictEqual({ + type: Caip25CaveatType, + value: { + requiredScopes: {}, + optionalScopes: {}, + isMultichainOrigin: true, + }, + }); + }); + }); + + describe('Caip25CaveatMutators.authorizedScopes', () => { + describe('removeScope', () => { + it('returns a version of the caveat with the given scope removed from requiredScopes if it is present', () => { + const ethereumGoerliCaveat = { + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: [], + }, + }, + sessionProperties: {}, + isMultichainOrigin: true, + }; + const result = removeScope(ethereumGoerliCaveat, 'eip155:1'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.UpdateValue, + value: { + requiredScopes: {}, + optionalScopes: { + 'eip155:5': { + accounts: [], + }, + }, + }, + }); + }); + + it('returns a version of the caveat with the given scope removed from optionalScopes if it is present', () => { + const ethereumGoerliCaveat = { + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: [], + }, + }, + 
sessionProperties: {}, + isMultichainOrigin: true, + }; + const result = removeScope(ethereumGoerliCaveat, 'eip155:5'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.UpdateValue, + value: { + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + }, + optionalScopes: {}, + }, + }); + }); + + it('returns a version of the caveat with the given scope removed from requiredScopes and optionalScopes if it is present', () => { + const ethereumGoerliCaveat = { + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + 'eip155:5': { + accounts: [], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: [], + }, + }, + sessionProperties: {}, + isMultichainOrigin: true, + }; + const result = removeScope(ethereumGoerliCaveat, 'eip155:5'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.UpdateValue, + value: { + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + }, + optionalScopes: {}, + }, + }); + }); + + it('returns the caveat unchanged when the given scope is not found in either requiredScopes or optionalScopes', () => { + const ethereumGoerliCaveat = { + requiredScopes: { + 'eip155:1': { + accounts: [], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: [], + }, + }, + sessionProperties: {}, + isMultichainOrigin: true, + }; + const result = removeScope(ethereumGoerliCaveat, 'eip155:2'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.Noop, + }); + }); + }); + + describe('removeAccount', () => { + it('returns a version of the caveat with the given account removed from requiredScopes if it is present', () => { + const ethereumGoerliCaveat: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: {}, + isMultichainOrigin: true, + }; + const result = removeAccount(ethereumGoerliCaveat, '0x1'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.UpdateValue, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x2'], + }, + }, + optionalScopes: {}, + isMultichainOrigin: true, + }, + }); + }); + + it('returns a version of the caveat with the given account removed from optionalScopes if it is present', () => { + const ethereumGoerliCaveat: Caip25CaveatValue = { + requiredScopes: {}, + optionalScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + isMultichainOrigin: true, + }; + const result = removeAccount(ethereumGoerliCaveat, '0x1'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.UpdateValue, + value: { + requiredScopes: {}, + optionalScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x2'], + }, + }, + isMultichainOrigin: true, + }, + }); + }); + + it('returns a version of the caveat with the given account removed from requiredScopes and optionalScopes if it is present', () => { + const ethereumGoerliCaveat: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + 'eip155:2': { + accounts: ['eip155:2:0x1', 'eip155:2:0x2'], + }, + }, + optionalScopes: { + 'eip155:3': { + accounts: ['eip155:3:0x1', 'eip155:3:0x2'], + }, + }, + isMultichainOrigin: true, + }; + const result = removeAccount(ethereumGoerliCaveat, '0x1'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.UpdateValue, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x2'], + }, + 'eip155:2': { + accounts: ['eip155:2:0x2'], + }, + }, + optionalScopes: { + 'eip155:3': { + accounts: ['eip155:3:0x2'], + }, + }, + 
isMultichainOrigin: true, + }, + }); + }); + + it('returns the caveat unchanged when the given account is not found in either requiredScopes or optionalScopes', () => { + const ethereumGoerliCaveat: Caip25CaveatValue = { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0x1', 'eip155:1:0x2'], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: [], + }, + }, + isMultichainOrigin: true, + }; + const result = removeAccount(ethereumGoerliCaveat, '0x3'); + expect(result).toStrictEqual({ + operation: CaveatMutatorOperation.Noop, + }); + }); + }); + }); + + describe('permission validator', () => { + const findNetworkClientIdByChainId = jest.fn(); + const listAccounts = jest.fn(); + const { validator } = caip25EndowmentBuilder.specificationBuilder({ + methodHooks: { + findNetworkClientIdByChainId, + listAccounts, + }, + }); + + it('throws an error if there is not exactly one caveat', () => { + expect(() => { + validator({ + caveats: [ + { + type: 'caveatType', + value: {}, + }, + { + type: 'caveatType', + value: {}, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Invalid caveats. There must be a single caveat of type "${Caip25CaveatType}".`, + ), + ); + + expect(() => { + validator({ + // @ts-expect-error Intentionally invalid input + caveats: [], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Invalid caveats. There must be a single caveat of type "${Caip25CaveatType}".`, + ), + ); + }); + + it('throws an error if there is no CAIP-25 caveat', () => { + expect(() => { + validator({ + caveats: [ + { + type: 'NotCaip25Caveat', + value: {}, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Invalid caveats. 
There must be a single caveat of type "${Caip25CaveatType}".`, + ), + ); + }); + + it('throws an error if the CAIP-25 caveat is malformed', () => { + expect(() => { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + missingRequiredScopes: {}, + optionalScopes: {}, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Received invalid value for caveat of type "${Caip25CaveatType}".`, + ), + ); + + expect(() => { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: {}, + missingOptionalScopes: {}, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Received invalid value for caveat of type "${Caip25CaveatType}".`, + ), + ); + + expect(() => { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: {}, + optionalScopes: {}, + isMultichainOrigin: 'NotABoolean', + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Received invalid value for caveat of type "${Caip25CaveatType}".`, + ), + ); + }); + + it('asserts the internal required scopeStrings are supported', () => { + try { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0xdead'], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: ['eip155:5:0xbeef'], + }, + }, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + } catch (err) { + // noop + } + expect(MockScopeSupported.isSupportedScopeString).toHaveBeenCalledWith( + 'eip155:1', + expect.any(Function), + ); + + MockScopeSupported.isSupportedScopeString.mock.calls[0][1]('0x1'); + expect(findNetworkClientIdByChainId).toHaveBeenCalledWith('0x1'); + }); + + it('asserts the internal optional scopeStrings are supported', () => { + try { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0xdead'], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: ['eip155:5:0xbeef'], + }, + }, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + } catch (err) { + // noop + } + + expect(MockScopeSupported.isSupportedScopeString).toHaveBeenCalledWith( + 'eip155:5', + expect.any(Function), + ); + + MockScopeSupported.isSupportedScopeString.mock.calls[1][1]('0x5'); + expect(findNetworkClientIdByChainId).toHaveBeenCalledWith('0x5'); + }); + + it('does not throw if unable to find a network client for the chainId', () => { + findNetworkClientIdByChainId.mockImplementation(() => { + throw new Error('unable to find network client'); + }); + try { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0xdead'], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: ['eip155:5:0xbeef'], + }, + }, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + } catch (err) 
{ + // noop + } + + expect( + MockScopeSupported.isSupportedScopeString.mock.calls[0][1]('0x1'), + ).toBe(false); + expect(findNetworkClientIdByChainId).toHaveBeenCalledWith('0x1'); + }); + + it('throws if not all scopeStrings are supported', () => { + expect(() => { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0xdead'], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: ['eip155:5:0xbeef'], + }, + }, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Received scopeString value(s) for caveat of type "${Caip25CaveatType}" that are not supported by the wallet.`, + ), + ); + }); + + it('throws if the eth accounts specified in the internal scopeObjects are not found in the wallet keyring', () => { + MockScopeSupported.isSupportedScopeString.mockReturnValue(true); + listAccounts.mockReturnValue([{ address: '0xdead' }]); // missing '0xbeef' + + expect(() => { + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0xdead'], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: ['eip155:5:0xbeef'], + }, + }, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }); + }).toThrow( + new Error( + `${Caip25EndowmentPermissionName} error: Received eip155 account value(s) for caveat of type "${Caip25CaveatType}" that were not found in the wallet keyring.`, + ), + ); + }); + + it('does not throw if the CAIP-25 caveat value is valid', () => { + MockScopeSupported.isSupportedScopeString.mockReturnValue(true); + listAccounts.mockReturnValue([ + { address: '0xdead' }, + { address: '0xbeef' }, + ]); + + expect( + validator({ + caveats: [ + { + type: Caip25CaveatType, + value: { + requiredScopes: { + 'eip155:1': { + accounts: ['eip155:1:0xdead'], + }, + }, + optionalScopes: { + 'eip155:5': { + accounts: ['eip155:5:0xbeef'], + }, + }, + isMultichainOrigin: true, + }, + }, + ], + date: 1234, + id: '1', + invoker: 'test.com', + parentCapability: Caip25EndowmentPermissionName, + }), + ).toBeUndefined(); + }); + }); +}); diff --git a/packages/multichain/src/caip25Permission.ts b/packages/multichain/src/caip25Permission.ts new file mode 100644 index 0000000000..7a8dd93f23 --- /dev/null +++ b/packages/multichain/src/caip25Permission.ts @@ -0,0 +1,299 @@ +import type { NetworkClientId } from '@metamask/network-controller'; +import type { + PermissionSpecificationBuilder, + EndowmentGetterParams, + ValidPermissionSpecification, + PermissionValidatorConstraint, + PermissionConstraint, +} from '@metamask/permission-controller'; +import { + CaveatMutatorOperation, + PermissionType, +} from '@metamask/permission-controller'; +import type { CaipAccountId, Json } from '@metamask/utils'; +import { + hasProperty, + parseCaipAccountId, + type Hex, + type NonEmptyArray, +} from '@metamask/utils'; +import { cloneDeep, isEqual } from 'lodash'; + +import { getEthAccounts } from './adapters/caip-permission-adapter-eth-accounts'; +import { assertIsInternalScopesObject } from './scope/assert'; +import { isSupportedScopeString } from './scope/supported'; +import { + type ExternalScopeString, + type InternalScopeObject, + type InternalScopesObject, +} from './scope/types'; + +/** + * The CAIP-25 permission caveat value. 
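Based on the `specificationBuilder` test above, here is a sketch of how a client might construct the new endowment specification (the implementation follows below). The two hook implementations are stand-ins for the client's real network and keyring lookups, and the package-root import is assumed.

```typescript
import { caip25EndowmentBuilder, Caip25CaveatType } from '@metamask/multichain';

// The hooks let the caveat validator check that every requested chain has a
// network client and that every requested EIP-155 account is known to the
// keyring. Both implementations below are placeholders.
const caip25Specification = caip25EndowmentBuilder.specificationBuilder({
  methodHooks: {
    findNetworkClientIdByChainId: (chainId) => {
      if (chainId !== '0x1') {
        throw new Error(`No network client configured for ${chainId}`);
      }
      return 'mainnet';
    },
    listAccounts: () => [{ address: '0xdead' }],
  },
});

// caip25Specification.targetName is 'endowment:caip25', its allowedCaveats is
// [Caip25CaveatType], and its validator throws unless the permission has
// exactly one well-formed caveat of type Caip25CaveatType whose scopes and
// accounts the wallet actually supports.
console.log(caip25Specification.targetName, Caip25CaveatType);
```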
+ * This permission contains the required and optional scopes and session properties from the [CAIP-25](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-25.md) request that initiated the permission session. + * It also contains a boolean (isMultichainOrigin) indicating if the permission session is multichain, which may be needed to determine implicit permissioning. + */ +export type Caip25CaveatValue = { + requiredScopes: InternalScopesObject; + optionalScopes: InternalScopesObject; + sessionProperties?: Record<string, Json>; + isMultichainOrigin: boolean; +}; + +/** + * The name of the CAIP-25 permission caveat. + */ +export const Caip25CaveatType = 'authorizedScopes'; + +/** + * Creates a CAIP-25 permission caveat. + * @param value - The CAIP-25 permission caveat value. + * @returns The CAIP-25 permission caveat (now including the type). + */ +export const createCaip25Caveat = (value: Caip25CaveatValue) => { + return { + type: Caip25CaveatType, + value, + }; +}; + +/** + * The target name of the CAIP-25 endowment permission. + */ +export const Caip25EndowmentPermissionName = 'endowment:caip25'; + +type Caip25EndowmentSpecification = ValidPermissionSpecification<{ + permissionType: PermissionType.Endowment; + targetName: typeof Caip25EndowmentPermissionName; + endowmentGetter: (_options?: EndowmentGetterParams) => null; + validator: PermissionValidatorConstraint; + allowedCaveats: Readonly<NonEmptyArray<string>> | null; +}>; + +type Caip25EndowmentSpecificationBuilderOptions = { + methodHooks: { + findNetworkClientIdByChainId: (chainId: Hex) => NetworkClientId; + listAccounts: () => { address: Hex }[]; + }; +}; + +/** + * Helper that returns an `endowment:caip25` specification that + * can be passed into the PermissionController constructor. + * + * @param builderOptions - The specification builder options. + * @param builderOptions.methodHooks - The RPC method hooks needed by the method implementation. + * @returns The specification for the `caip25` endowment. + */ +const specificationBuilder: PermissionSpecificationBuilder< + PermissionType.Endowment, + Caip25EndowmentSpecificationBuilderOptions, + Caip25EndowmentSpecification +> = ({ methodHooks }: Caip25EndowmentSpecificationBuilderOptions) => { + return { + permissionType: PermissionType.Endowment, + targetName: Caip25EndowmentPermissionName, + allowedCaveats: [Caip25CaveatType], + endowmentGetter: (_getterOptions?: EndowmentGetterParams) => null, + validator: (permission: PermissionConstraint) => { + const caip25Caveat = permission.caveats?.[0]; + if ( + permission.caveats?.length !== 1 || + caip25Caveat?.type !== Caip25CaveatType + ) { + throw new Error( + `${Caip25EndowmentPermissionName} error: Invalid caveats.
There must be a single caveat of type "${Caip25CaveatType}".`, + ); + } + + if ( + !caip25Caveat.value || + !hasProperty(caip25Caveat.value, 'requiredScopes') || + !hasProperty(caip25Caveat.value, 'optionalScopes') || + !hasProperty(caip25Caveat.value, 'isMultichainOrigin') || + typeof caip25Caveat.value.isMultichainOrigin !== 'boolean' + ) { + throw new Error( + `${Caip25EndowmentPermissionName} error: Received invalid value for caveat of type "${Caip25CaveatType}".`, + ); + } + + const { requiredScopes, optionalScopes } = caip25Caveat.value; + + assertIsInternalScopesObject(requiredScopes); + assertIsInternalScopesObject(optionalScopes); + + const isChainIdSupported = (chainId: Hex) => { + try { + methodHooks.findNetworkClientIdByChainId(chainId); + return true; + } catch (err) { + return false; + } + }; + + const allRequiredScopesSupported = Object.keys(requiredScopes).every( + (scopeString) => + isSupportedScopeString(scopeString, isChainIdSupported), + ); + const allOptionalScopesSupported = Object.keys(optionalScopes).every( + (scopeString) => + isSupportedScopeString(scopeString, isChainIdSupported), + ); + if (!allRequiredScopesSupported || !allOptionalScopesSupported) { + throw new Error( + `${Caip25EndowmentPermissionName} error: Received scopeString value(s) for caveat of type "${Caip25CaveatType}" that are not supported by the wallet.`, + ); + } + + // Fetch EVM accounts from native wallet keyring + // These addresses are lowercased already + const existingEvmAddresses = methodHooks + .listAccounts() + .map((account) => account.address); + const ethAccounts = getEthAccounts({ + requiredScopes, + optionalScopes, + }).map((address) => address.toLowerCase() as Hex); + + const allEthAccountsSupported = ethAccounts.every((address) => + existingEvmAddresses.includes(address), + ); + if (!allEthAccountsSupported) { + throw new Error( + `${Caip25EndowmentPermissionName} error: Received eip155 account value(s) for caveat of type "${Caip25CaveatType}" that were not found in the wallet keyring.`, + ); + } + }, + }; +}; + +/** + * The `caip25` endowment specification builder. Passed to the + * `PermissionController` for constructing and validating the + * `endowment:caip25` permission. + */ +export const caip25EndowmentBuilder = Object.freeze({ + targetName: Caip25EndowmentPermissionName, + specificationBuilder, +} as const); + +/** + * Factories that construct caveat mutator functions that are passed to + * PermissionController.updatePermissionsByCaveat. + */ +export const Caip25CaveatMutators = { + [Caip25CaveatType]: { + removeScope, + removeAccount, + }, +}; + +/** + * Removes the account from the scope object. + * + * @param targetAddress - The address to remove from the scope object. + * @returns A function that removes the account from the scope object. + */ +function removeAccountFilterFn(targetAddress: string) { + return (account: CaipAccountId) => { + const parsed = parseCaipAccountId(account); + return parsed.address !== targetAddress; + }; +} + +/** + * Removes the account from the scope object. + * + * @param scopeObject - The scope object to remove the account from. + * @param targetAddress - The address to remove from the scope object. + */ +function removeAccountFromScopeObject( + scopeObject: InternalScopeObject, + targetAddress: string, +) { + if (scopeObject.accounts) { + scopeObject.accounts = scopeObject.accounts.filter( + removeAccountFilterFn(targetAddress), + ); + } +} + +/** + * Removes the target account from the scope object. 
+ * + * @param caip25CaveatValue - The CAIP-25 permission caveat value from which to remove the account (across all chain scopes). + * @param targetAddress - The address to remove from the scope object. Not a CAIP-10 formatted address because it will be removed across each chain scope. + * @returns The caveat mutation result: the updated caveat value, or a no-op if the account was not present. + */ +function removeAccount( + caip25CaveatValue: Caip25CaveatValue, + targetAddress: Hex, +) { + const copyOfCaveatValue = cloneDeep(caip25CaveatValue); + + [copyOfCaveatValue.requiredScopes, copyOfCaveatValue.optionalScopes].forEach( + (scopes) => { + Object.entries(scopes).forEach(([, scopeObject]) => { + removeAccountFromScopeObject(scopeObject, targetAddress); + }); + }, + ); + + const noChange = isEqual(copyOfCaveatValue, caip25CaveatValue); + + if (noChange) { + return { + operation: CaveatMutatorOperation.Noop, + }; + } + + return { + operation: CaveatMutatorOperation.UpdateValue, + value: copyOfCaveatValue, + }; +} + +/** + * Removes the target scope from the required and optional scopes of the given + * `endowment:caip25` caveat. No-ops if the target scopeString is not in + * the existing scopes. + * + * @param caip25CaveatValue - The CAIP-25 permission caveat value to remove the scope from. + * @param targetScopeString - The scope that is being removed. + * @returns The caveat mutation result: the updated caveat value, or a no-op if the scope was not present. + */ +function removeScope( + caip25CaveatValue: Caip25CaveatValue, + targetScopeString: ExternalScopeString, +) { + const newRequiredScopes = Object.entries( + caip25CaveatValue.requiredScopes, + ).filter(([scope]) => scope !== targetScopeString); + const newOptionalScopes = Object.entries( + caip25CaveatValue.optionalScopes, + ).filter(([scope]) => { + return scope !== targetScopeString; + }); + + const requiredScopesRemoved = + newRequiredScopes.length !== + Object.keys(caip25CaveatValue.requiredScopes).length; + const optionalScopesRemoved = + newOptionalScopes.length !== + Object.keys(caip25CaveatValue.optionalScopes).length; + + if (requiredScopesRemoved || optionalScopesRemoved) { + return { + operation: CaveatMutatorOperation.UpdateValue, + value: { + requiredScopes: Object.fromEntries(newRequiredScopes), + optionalScopes: Object.fromEntries(newOptionalScopes), + }, + }; + } + + return { + operation: CaveatMutatorOperation.Noop, + }; +} diff --git a/packages/multichain/src/index.test.ts b/packages/multichain/src/index.test.ts index bc062d3694..61f0fdcc42 100644 --- a/packages/multichain/src/index.test.ts +++ b/packages/multichain/src/index.test.ts @@ -1,9 +1,31 @@ -import greeter from '.'; +import * as allExports from '.'; -describe('Test', () => { - it('greets', () => { - const name = 'Huey'; - const result = greeter(name); - expect(result).toBe('Hello, Huey!'); +describe('@metamask/multichain', () => { + it('has expected JavaScript exports', () => { + expect(Object.keys(allExports)).toMatchInlineSnapshot(` + Array [ + "getEthAccounts", + "setEthAccounts", + "getPermittedEthChainIds", + "addPermittedEthChainId", + "setPermittedEthChainIds", + "validateAndNormalizeScopes", + "KnownWalletRpcMethods", + "KnownRpcMethods", + "KnownWalletNamespaceRpcMethods", + "KnownNotifications", + "KnownWalletScopeString", + "parseScopeString", + "normalizeScope", + "mergeScopeObject", + "mergeScopes", + "normalizeAndMergeScopes", + "Caip25CaveatType", + "createCaip25Caveat", + "Caip25EndowmentPermissionName", + "caip25EndowmentBuilder", + "Caip25CaveatMutators", + ] + `); }); }); diff --git a/packages/multichain/src/index.ts b/packages/multichain/src/index.ts index
6972c11729..d322c2b74d 100644 --- a/packages/multichain/src/index.ts +++ b/packages/multichain/src/index.ts @@ -1,9 +1,47 @@ -/** - * Example function that returns a greeting for the given name. - * - * @param name - The name to greet. - * @returns The greeting. - */ -export default function greeter(name: string): string { - return `Hello, ${name}!`; -} +export { + getEthAccounts, + setEthAccounts, +} from './adapters/caip-permission-adapter-eth-accounts'; +export { + getPermittedEthChainIds, + addPermittedEthChainId, + setPermittedEthChainIds, +} from './adapters/caip-permission-adapter-permittedChains'; + +export type { Caip25Authorization } from './scope/authorization'; +export { validateAndNormalizeScopes } from './scope/authorization'; +export { + KnownWalletRpcMethods, + KnownRpcMethods, + KnownWalletNamespaceRpcMethods, + KnownNotifications, + KnownWalletScopeString, +} from './scope/constants'; +export type { + ExternalScopeString, + ExternalScopeObject, + ExternalScopesObject, + InternalScopeString, + InternalScopeObject, + InternalScopesObject, + NormalizedScopeObject, + NormalizedScopesObject, + ScopedProperties, + NonWalletKnownCaipNamespace, +} from './scope/types'; +export { parseScopeString } from './scope/types'; +export { + normalizeScope, + mergeScopeObject, + mergeScopes, + normalizeAndMergeScopes, +} from './scope/transform'; + +export type { Caip25CaveatValue } from './caip25Permission'; +export { + Caip25CaveatType, + createCaip25Caveat, + Caip25EndowmentPermissionName, + caip25EndowmentBuilder, + Caip25CaveatMutators, +} from './caip25Permission'; diff --git a/packages/multichain/src/scope/assert.test.ts b/packages/multichain/src/scope/assert.test.ts new file mode 100644 index 0000000000..0fd23b5bf6 --- /dev/null +++ b/packages/multichain/src/scope/assert.test.ts @@ -0,0 +1,559 @@ +import * as Utils from '@metamask/utils'; + +import { + assertScopeSupported, + assertScopesSupported, + assertIsExternalScopesObject, + assertIsInternalScopesObject, +} from './assert'; +import { Caip25Errors } from './errors'; +import * as Supported from './supported'; +import type { NormalizedScopeObject } from './types'; + +jest.mock('./supported', () => ({ + isSupportedScopeString: jest.fn(), + isSupportedNotification: jest.fn(), + isSupportedMethod: jest.fn(), +})); + +jest.mock('@metamask/utils', () => ({ + ...jest.requireActual('@metamask/utils'), + isCaipReference: jest.fn(), + isCaipAccountId: jest.fn(), +})); + +const MockSupported = jest.mocked(Supported); +const MockUtils = jest.mocked(Utils); + +const validScopeObject: NormalizedScopeObject = { + methods: [], + notifications: [], + accounts: [], +}; + +describe('Scope Assert', () => { + beforeEach(() => { + MockUtils.isCaipReference.mockImplementation(() => true); + MockUtils.isCaipAccountId.mockImplementation(() => true); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + describe('assertScopeSupported', () => { + const isChainIdSupported = jest.fn(); + + describe('scopeString', () => { + it('checks if the scopeString is supported', () => { + try { + assertScopeSupported('scopeString', validScopeObject, { + isChainIdSupported, + }); + } catch (err) { + // noop + } + expect(MockSupported.isSupportedScopeString).toHaveBeenCalledWith( + 'scopeString', + isChainIdSupported, + ); + }); + + it('throws an error if the scopeString is not supported', () => { + MockSupported.isSupportedScopeString.mockReturnValue(false); + expect(() => { + assertScopeSupported('scopeString', validScopeObject, { + isChainIdSupported, + }); 
+ }).toThrow(Caip25Errors.requestedChainsNotSupportedError()); + }); + }); + + describe('scopeObject', () => { + beforeEach(() => { + MockSupported.isSupportedScopeString.mockReturnValue(true); + }); + + it('checks if the methods are supported', () => { + try { + assertScopeSupported( + 'scopeString', + { + ...validScopeObject, + methods: ['eth_chainId'], + }, + { + isChainIdSupported, + }, + ); + } catch (err) { + // noop + } + + expect(MockSupported.isSupportedMethod).toHaveBeenCalledWith( + 'scopeString', + 'eth_chainId', + ); + }); + + it('throws an error if there are unsupported methods', () => { + MockSupported.isSupportedMethod.mockReturnValue(false); + expect(() => { + assertScopeSupported( + 'scopeString', + { + ...validScopeObject, + methods: ['eth_chainId'], + }, + { + isChainIdSupported, + }, + ); + }).toThrow(Caip25Errors.requestedMethodsNotSupportedError()); + }); + + it('checks if the notifications are supported', () => { + MockSupported.isSupportedMethod.mockReturnValue(true); + try { + assertScopeSupported( + 'scopeString', + { + ...validScopeObject, + notifications: ['chainChanged'], + }, + { + isChainIdSupported, + }, + ); + } catch (err) { + // noop + } + + expect(MockSupported.isSupportedNotification).toHaveBeenCalledWith( + 'scopeString', + 'chainChanged', + ); + }); + + it('throws an error if there are unsupported notifications', () => { + MockSupported.isSupportedMethod.mockReturnValue(true); + MockSupported.isSupportedNotification.mockReturnValue(false); + expect(() => { + assertScopeSupported( + 'scopeString', + { + ...validScopeObject, + notifications: ['chainChanged'], + }, + { + isChainIdSupported, + }, + ); + }).toThrow(Caip25Errors.requestedNotificationsNotSupportedError()); + }); + + it('does not throw if the scopeObject is valid', () => { + MockSupported.isSupportedMethod.mockReturnValue(true); + MockSupported.isSupportedNotification.mockReturnValue(true); + expect( + assertScopeSupported( + 'scopeString', + { + ...validScopeObject, + methods: ['eth_chainId'], + notifications: ['chainChanged'], + accounts: ['eip155:1:0xdeadbeef'], + }, + { + isChainIdSupported, + }, + ), + ).toBeUndefined(); + }); + }); + }); + + describe('assertScopesSupported', () => { + const isChainIdSupported = jest.fn(); + + it('does not throw an error if no scopes are defined', () => { + expect( + assertScopesSupported( + {}, + { + isChainIdSupported, + }, + ), + ).toBeUndefined(); + }); + + it('throws an error if any scope is invalid', () => { + MockSupported.isSupportedScopeString.mockReturnValue(false); + + expect(() => { + assertScopesSupported( + { + 'eip155:1': validScopeObject, + }, + { + isChainIdSupported, + }, + ); + }).toThrow(Caip25Errors.requestedChainsNotSupportedError()); + }); + + it('does not throw an error if all scopes are valid', () => { + MockSupported.isSupportedScopeString.mockReturnValue(true); + + expect( + assertScopesSupported( + { + 'eip155:1': validScopeObject, + 'eip155:2': validScopeObject, + }, + { + isChainIdSupported, + }, + ), + ).toBeUndefined(); + }); + }); + + describe('assertIsExternalScopesObject', () => { + it('does not throw if passed obj is a valid ExternalScopesObject with all valid properties', () => { + const obj = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => assertIsExternalScopesObject(obj)).not.toThrow(); + }); + + it('does not throw if 
passed obj is a valid ExternalScopesObject with some optional properties missing', () => { + const obj = { + accounts: ['eip155:1:0x1234'], + methods: ['method1'], + }; + expect(() => assertIsExternalScopesObject(obj)).not.toThrow(); + }); + + it('throws an error if passed obj is not an object', () => { + expect(() => assertIsExternalScopesObject(null)).toThrow( + 'ExternalScopesObject must be an object', + ); + expect(() => assertIsExternalScopesObject(123)).toThrow( + 'ExternalScopesObject must be an object', + ); + expect(() => assertIsExternalScopesObject('string')).toThrow( + 'ExternalScopesObject must be an object', + ); + }); + + it('throws an error if passed an object with an ExternalScopeObject value that is not an object', () => { + expect(() => assertIsExternalScopesObject({ 'eip155:1': 123 })).toThrow( + 'ExternalScopeObject must be an object', + ); + }); + + it('throws an error if passed an object with a key that is not a valid ExternalScopeString', () => { + jest.spyOn(Utils, 'isCaipReference').mockImplementation(() => false); + + expect(() => + assertIsExternalScopesObject({ 'invalid-scope-string': {} }), + ).toThrow('scopeString is not a valid ExternalScopeString'); + }); + + it('throws an error if passed an object with an ExternalScopeObject with a references property that is not an array', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: 'not-an-array', + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow( + 'ExternalScopeObject.references must be an array of CaipReference', + ); + }); + + it('throws an error if references contains invalid CaipReference', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['invalidRef'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + jest + .spyOn(Utils, 'isCaipReference') + .mockImplementation((ref) => ref !== 'invalidRef'); + + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow( + 'ExternalScopeObject.references must be an array of CaipReference', + ); + }); + + it('throws an error if passed an object with an ExternalScopeObject with an accounts property that is not an array', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: 'not-an-array', + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow( + 'ExternalScopeObject.accounts must be an array of CaipAccountId', + ); + }); + + it('throws an error if accounts contains invalid CaipAccountId', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234', 'invalidAccount'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + MockUtils.isCaipAccountId.mockImplementation( + (id) => id !== 'invalidAccount', + ); + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow( + 'ExternalScopeObject.accounts must be an array of CaipAccountId', + ); + }); + +
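A minimal usage sketch of `assertIsExternalScopesObject` (defined in `packages/multichain/src/scope/assert.ts` later in this diff) may help orient the assertions in this test file. The call site and the input values are illustrative assumptions only; the function, its type-guard behavior, and its error messages come from the patch itself.

```typescript
import { assertIsExternalScopesObject } from './assert';

// Hypothetical untrusted input, e.g. the `requiredScopes` parameter of an
// incoming CAIP-25 `wallet_createSession` request.
const maybeScopes: unknown = {
  'eip155:1': {
    methods: ['eth_sendTransaction'],
    notifications: ['eth_subscription'],
    accounts: ['eip155:1:0x0000000000000000000000000000000000000001'],
  },
};

// Throws a descriptive error (e.g. 'ExternalScopeObject.accounts must be an
// array of CaipAccountId') if any property is malformed; on success,
// `maybeScopes` is narrowed to ExternalScopesObject.
assertIsExternalScopesObject(maybeScopes);
```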
it('throws an error if passed an object with an ExternalScopeObject with a methods property that is not an array', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: 'not-an-array', + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow('ExternalScopeObject.methods must be an array of strings'); + }); + + it('throws an error if methods contains non-string elements', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 123], + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow('ExternalScopeObject.methods must be an array of strings'); + }); + + it('throws an error if passed an object with an ExternalScopeObject with a notifications property that is not an array', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: 'not-an-array', + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow( + 'ExternalScopeObject.notifications must be an array of strings', + ); + }); + + it('throws an error if notifications contains non-string elements', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1', false], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow( + 'ExternalScopeObject.notifications must be an array of strings', + ); + }); + + it('throws an error if passed an object with an ExternalScopeObject with a rpcDocuments property that is not an array', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: 'not-an-array', + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow('ExternalScopeObject.rpcDocuments must be an array of strings'); + }); + + it('throws an error if rpcDocuments contains non-string elements', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: ['doc1', 456], + rpcEndpoints: ['endpoint1'], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow('ExternalScopeObject.rpcDocuments must be an array of strings'); + }); + + it('throws an error if passed an object with an ExternalScopeObject with a rpcEndpoints property that is not an array', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: 
['doc1'], + rpcEndpoints: 'not-an-array', + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow('ExternalScopeObject.rpcEndpoints must be an array of strings'); + }); + + it('throws an error if passed an object with an ExternalScopeObject with a rpcEndpoints property that contains non-string elements', () => { + const invalidExternalScopeObject = { + 'eip155:1': { + references: ['reference1', 'reference2'], + accounts: ['eip155:1:0x1234'], + methods: ['method1', 'method2'], + notifications: ['notification1'], + rpcDocuments: ['doc1'], + rpcEndpoints: ['endpoint1', null], + }, + }; + expect(() => + assertIsExternalScopesObject(invalidExternalScopeObject), + ).toThrow('ExternalScopeObject.rpcEndpoints must be an array of strings'); + }); + }); + + describe('assertIsInternalScopesObject', () => { + it('does not throw if passed obj is a valid InternalScopesObject with all valid properties', () => { + const obj = { + 'eip155:1': { + accounts: ['eip155:1:0x1234'], + }, + }; + expect(() => assertIsInternalScopesObject(obj)).not.toThrow(); + }); + + it('throws an error if passed obj is not an object', () => { + expect(() => assertIsInternalScopesObject(null)).toThrow( + 'InternalScopesObject must be an object', + ); + expect(() => assertIsInternalScopesObject(123)).toThrow( + 'InternalScopesObject must be an object', + ); + expect(() => assertIsInternalScopesObject('string')).toThrow( + 'InternalScopesObject must be an object', + ); + }); + + it('throws an error if passed an object with an InternalScopeObject value that is not an object', () => { + expect(() => assertIsInternalScopesObject({ 'eip155:1': 123 })).toThrow( + 'InternalScopeObject must be an object', + ); + }); + + it('throws an error if passed an object with a key that is not a valid InternalScopeString', () => { + jest.spyOn(Utils, 'isCaipReference').mockImplementation(() => false); + + expect(() => + assertIsInternalScopesObject({ 'invalid-scope-string': {} }), + ).toThrow('scopeString is not a valid InternalScopeString'); + }); + + it('throws an error if passed an object with an InternalScopeObject without an accounts property', () => { + const invalidInternalScopeObject = { + 'eip155:1': {}, + }; + expect(() => + assertIsInternalScopesObject(invalidInternalScopeObject), + ).toThrow( + 'InternalScopeObject.accounts must be an array of CaipAccountId', + ); + }); + + it('throws an error if passed an object with an InternalScopeObject with an accounts property that is not an array', () => { + const invalidInternalScopeObject = { + 'eip155:1': { + accounts: 'not-an-array', + }, + }; + expect(() => + assertIsInternalScopesObject(invalidInternalScopeObject), + ).toThrow( + 'InternalScopeObject.accounts must be an array of CaipAccountId', + ); + }); + + it('throws an error if accounts contains invalid CaipAccountId', () => { + const invalidInternalScopeObject = { + 'eip155:1': { + accounts: ['eip155:1:0x1234', 'invalidAccount'], + }, + }; + MockUtils.isCaipAccountId.mockImplementation( + (id) => id !== 'invalidAccount', + ); + expect(() => + assertIsInternalScopesObject(invalidInternalScopeObject), + ).toThrow( + 'InternalScopeObject.accounts must be an array of CaipAccountId', + ); + }); + }); +}); diff --git a/packages/multichain/src/scope/assert.ts b/packages/multichain/src/scope/assert.ts new file mode 100644 index 0000000000..0d2c8c16cb --- /dev/null +++ b/packages/multichain/src/scope/assert.ts @@ -0,0 +1,248 @@ +import { + hasProperty, + isCaipAccountId, + isCaipChainId, + isCaipNamespace, 
+ isCaipReference, + KnownCaipNamespace, + type Hex, +} from '@metamask/utils'; + +import { Caip25Errors } from './errors'; +import { + isSupportedMethod, + isSupportedNotification, + isSupportedScopeString, +} from './supported'; +import type { + ExternalScopeObject, + ExternalScopesObject, + ExternalScopeString, + InternalScopeObject, + InternalScopesObject, + InternalScopeString, + NormalizedScopeObject, + NormalizedScopesObject, +} from './types'; + +/** + * Asserts that a scope string and its associated scope object are supported. + * @param scopeString - The scope string against which to assert support. + * @param scopeObject - The scope object against which to assert support. + * @param options - An object containing the following properties: + * @param options.isChainIdSupported - A predicate that determines if a chainID is supported. + */ +export const assertScopeSupported = ( + scopeString: string, + scopeObject: NormalizedScopeObject, + { + isChainIdSupported, + }: { + isChainIdSupported: (chainId: Hex) => boolean; + }, +) => { + const { methods, notifications } = scopeObject; + if (!isSupportedScopeString(scopeString, isChainIdSupported)) { + throw Caip25Errors.requestedChainsNotSupportedError(); + } + + const allMethodsSupported = methods.every((method) => + isSupportedMethod(scopeString, method), + ); + + if (!allMethodsSupported) { + throw Caip25Errors.requestedMethodsNotSupportedError(); + } + + if ( + notifications && + !notifications.every((notification) => + isSupportedNotification(scopeString, notification), + ) + ) { + throw Caip25Errors.requestedNotificationsNotSupportedError(); + } +}; + +/** + * Asserts that all scope strings and their associated scope objects are supported. + * @param scopes - The scopes object against which to assert support. + * @param options - An object containing the following properties: + * @param options.isChainIdSupported - A predicate that determines if a chainID is supported. + */ +export const assertScopesSupported = ( + scopes: NormalizedScopesObject, + { + isChainIdSupported, + }: { + isChainIdSupported: (chainId: Hex) => boolean; + }, +) => { + for (const [scopeString, scopeObject] of Object.entries(scopes)) { + assertScopeSupported(scopeString, scopeObject, { + isChainIdSupported, + }); + } +}; +/** + * Asserts that an object is a valid ExternalScopeObject. + * @param obj - The object to assert. 
+ */ +function assertIsExternalScopeObject( + obj: unknown, +): asserts obj is ExternalScopeObject { + if (typeof obj !== 'object' || obj === null) { + throw new Error('ExternalScopeObject must be an object'); + } + + if (hasProperty(obj, 'references')) { + if ( + !Array.isArray(obj.references) || + !obj.references.every(isCaipReference) + ) { + throw new Error( + 'ExternalScopeObject.references must be an array of CaipReference', + ); + } + } + + if (hasProperty(obj, 'accounts')) { + if (!Array.isArray(obj.accounts) || !obj.accounts.every(isCaipAccountId)) { + throw new Error( + 'ExternalScopeObject.accounts must be an array of CaipAccountId', + ); + } + } + + if (hasProperty(obj, 'methods')) { + if ( + !Array.isArray(obj.methods) || + !obj.methods.every((method) => typeof method === 'string') + ) { + throw new Error( + 'ExternalScopeObject.methods must be an array of strings', + ); + } + } + + if (hasProperty(obj, 'notifications')) { + if ( + !Array.isArray(obj.notifications) || + !obj.notifications.every( + (notification) => typeof notification === 'string', + ) + ) { + throw new Error( + 'ExternalScopeObject.notifications must be an array of strings', + ); + } + } + + if (hasProperty(obj, 'rpcDocuments')) { + if ( + !Array.isArray(obj.rpcDocuments) || + !obj.rpcDocuments.every((doc) => typeof doc === 'string') + ) { + throw new Error( + 'ExternalScopeObject.rpcDocuments must be an array of strings', + ); + } + } + + if (hasProperty(obj, 'rpcEndpoints')) { + if ( + !Array.isArray(obj.rpcEndpoints) || + !obj.rpcEndpoints.every((endpoint) => typeof endpoint === 'string') + ) { + throw new Error( + 'ExternalScopeObject.rpcEndpoints must be an array of strings', + ); + } + } +} + +/** + * Asserts that a scope string is a valid ExternalScopeString. + * @param scopeString - The scope string to assert. + */ +function assertIsExternalScopeString( + scopeString: unknown, +): asserts scopeString is ExternalScopeString { + if ( + typeof scopeString !== 'string' || + (!isCaipNamespace(scopeString) && !isCaipChainId(scopeString)) + ) { + throw new Error('scopeString is not a valid ExternalScopeString'); + } +} + +/** + * Asserts that an object is a valid ExternalScopesObject. + * @param obj - The object to assert. + */ +export function assertIsExternalScopesObject( + obj: unknown, +): asserts obj is ExternalScopesObject { + if (typeof obj !== 'object' || obj === null) { + throw new Error('ExternalScopesObject must be an object'); + } + + for (const [scopeString, scopeObject] of Object.entries(obj)) { + assertIsExternalScopeString(scopeString); + assertIsExternalScopeObject(scopeObject); + } +} + +/** + * Asserts that an object is a valid InternalScopeObject. + * @param obj - The object to assert. + */ +function assertIsInternalScopeObject( + obj: unknown, +): asserts obj is InternalScopeObject { + if (typeof obj !== 'object' || obj === null) { + throw new Error('InternalScopeObject must be an object'); + } + + if ( + !hasProperty(obj, 'accounts') || + !Array.isArray(obj.accounts) || + !obj.accounts.every(isCaipAccountId) + ) { + throw new Error( + 'InternalScopeObject.accounts must be an array of CaipAccountId', + ); + } +} + +/** + * Asserts that a scope string is a valid InternalScopeString. + * @param scopeString - The scope string to assert. 
+ */ +function assertIsInternalScopeString( + scopeString: unknown, +): asserts scopeString is InternalScopeString { + if ( + typeof scopeString !== 'string' || + (scopeString !== KnownCaipNamespace.Wallet && !isCaipChainId(scopeString)) + ) { + throw new Error('scopeString is not a valid InternalScopeString'); + } +} + +/** + * Asserts that an object is a valid InternalScopesObject. + * @param obj - The object to assert. + */ +export function assertIsInternalScopesObject( + obj: unknown, +): asserts obj is InternalScopesObject { + if (typeof obj !== 'object' || obj === null) { + throw new Error('InternalScopesObject must be an object'); + } + + for (const [scopeString, scopeObject] of Object.entries(obj)) { + assertIsInternalScopeString(scopeString); + assertIsInternalScopeObject(scopeObject); + } +} diff --git a/packages/multichain/src/scope/authorization.test.ts b/packages/multichain/src/scope/authorization.test.ts new file mode 100644 index 0000000000..4759b40edd --- /dev/null +++ b/packages/multichain/src/scope/authorization.test.ts @@ -0,0 +1,91 @@ +import { validateAndNormalizeScopes } from './authorization'; +import * as Transform from './transform'; +import type { ExternalScopeObject } from './types'; +import * as Validation from './validation'; + +jest.mock('./validation', () => ({ + getValidScopes: jest.fn(), +})); +const MockValidation = jest.mocked(Validation); + +jest.mock('./transform', () => ({ + normalizeAndMergeScopes: jest.fn(), +})); +const MockTransform = jest.mocked(Transform); + +const validScopeObject: ExternalScopeObject = { + methods: [], + notifications: [], +}; + +describe('Scope Authorization', () => { + describe('validateAndNormalizeScopes', () => { + it('validates the scopes', () => { + MockValidation.getValidScopes.mockReturnValue({ + validRequiredScopes: {}, + validOptionalScopes: {}, + }); + validateAndNormalizeScopes( + { + 'eip155:1': validScopeObject, + }, + { + 'eip155:5': validScopeObject, + }, + ); + expect(MockValidation.getValidScopes).toHaveBeenCalledWith( + { + 'eip155:1': validScopeObject, + }, + { + 'eip155:5': validScopeObject, + }, + ); + }); + + it('normalizes and merges the validated scopes', () => { + MockValidation.getValidScopes.mockReturnValue({ + validRequiredScopes: { + 'eip155:1': validScopeObject, + }, + validOptionalScopes: { + 'eip155:5': validScopeObject, + }, + }); + + validateAndNormalizeScopes({}, {}); + expect(MockTransform.normalizeAndMergeScopes).toHaveBeenCalledWith({ + 'eip155:1': validScopeObject, + }); + expect(MockTransform.normalizeAndMergeScopes).toHaveBeenCalledWith({ + 'eip155:5': validScopeObject, + }); + }); + + it('returns the normalized and merged scopes', () => { + MockValidation.getValidScopes.mockReturnValue({ + validRequiredScopes: { + 'eip155:1': validScopeObject, + }, + validOptionalScopes: { + 'eip155:5': validScopeObject, + }, + }); + MockTransform.normalizeAndMergeScopes.mockImplementation((value) => ({ + ...value, + transformed: true, + })); + + expect(validateAndNormalizeScopes({}, {})).toStrictEqual({ + normalizedRequiredScopes: { + 'eip155:1': validScopeObject, + transformed: true, + }, + normalizedOptionalScopes: { + 'eip155:5': validScopeObject, + transformed: true, + }, + }); + }); + }); +}); diff --git a/packages/multichain/src/scope/authorization.ts b/packages/multichain/src/scope/authorization.ts new file mode 100644 index 0000000000..0f43fa33e4 --- /dev/null +++ b/packages/multichain/src/scope/authorization.ts @@ -0,0 +1,53 @@ +import type { Json } from '@metamask/utils'; + +import { 
normalizeAndMergeScopes } from './transform'; +import type { + ExternalScopesObject, + ExternalScopeString, + NormalizedScopesObject, +} from './types'; +import { getValidScopes } from './validation'; + +/** + * Represents the parameters of a [CAIP-25](https://chainagnostic.org/CAIPs/caip-25) request. + */ +export type Caip25Authorization = ( + | { + requiredScopes: ExternalScopesObject; + optionalScopes?: ExternalScopesObject; + } + | { + requiredScopes?: ExternalScopesObject; + optionalScopes: ExternalScopesObject; + } +) & { + sessionProperties?: Record<string, Json>; + scopedProperties?: Record<ExternalScopeString, Json>; +}; + +/** + * Validates and normalizes a set of scopes according to the [CAIP-217](https://chainagnostic.org/CAIPs/caip-217) spec. + * @param requiredScopes - The required scopes to validate and normalize. + * @param optionalScopes - The optional scopes to validate and normalize. + * @returns An object containing the normalized required scopes and normalized optional scopes. + */ +export const validateAndNormalizeScopes = ( + requiredScopes: ExternalScopesObject, + optionalScopes: ExternalScopesObject, +): { + normalizedRequiredScopes: NormalizedScopesObject; + normalizedOptionalScopes: NormalizedScopesObject; +} => { + const { validRequiredScopes, validOptionalScopes } = getValidScopes( + requiredScopes, + optionalScopes, + ); + + const normalizedRequiredScopes = normalizeAndMergeScopes(validRequiredScopes); + const normalizedOptionalScopes = normalizeAndMergeScopes(validOptionalScopes); + + return { + normalizedRequiredScopes, + normalizedOptionalScopes, + }; +}; diff --git a/packages/multichain/src/scope/constants.test.ts b/packages/multichain/src/scope/constants.test.ts new file mode 100644 index 0000000000..8369ec721a --- /dev/null +++ b/packages/multichain/src/scope/constants.test.ts @@ -0,0 +1,59 @@ +import { KnownRpcMethods } from './constants'; + +describe('KnownRpcMethods', () => { + it('should match the snapshot', () => { + expect(KnownRpcMethods).toMatchInlineSnapshot(` + Object { + "bip122": Array [], + "eip155": Array [ + "wallet_switchEthereumChain", + "wallet_getPermissions", + "wallet_requestPermissions", + "wallet_revokePermissions", + "personal_sign", + "eth_signTypedData_v4", + "wallet_watchAsset", + "eth_requestAccounts", + "eth_accounts", + "eth_sendTransaction", + "eth_decrypt", + "eth_getEncryptionPublicKey", + "web3_clientVersion", + "eth_subscribe", + "eth_unsubscribe", + "eth_blockNumber", + "eth_call", + "eth_chainId", + "eth_coinbase", + "eth_estimateGas", + "eth_feeHistory", + "eth_gasPrice", + "eth_getBalance", + "eth_getBlockByHash", + "eth_getBlockByNumber", + "eth_getBlockTransactionCountByHash", + "eth_getBlockTransactionCountByNumber", + "eth_getCode", + "eth_getFilterChanges", + "eth_getFilterLogs", + "eth_getLogs", + "eth_getProof", + "eth_getStorageAt", + "eth_getTransactionByBlockHashAndIndex", + "eth_getTransactionByBlockNumberAndIndex", + "eth_getTransactionByHash", + "eth_getTransactionCount", + "eth_getTransactionReceipt", + "eth_getUncleCountByBlockHash", + "eth_getUncleCountByBlockNumber", + "eth_newBlockFilter", + "eth_newFilter", + "eth_newPendingTransactionFilter", + "eth_sendRawTransaction", + "eth_syncing", + "eth_uninstallFilter", + ], + } + `); }); }); diff --git a/packages/multichain/src/scope/constants.ts b/packages/multichain/src/scope/constants.ts new file mode 100644 index 0000000000..6a9427fd4f --- /dev/null +++ b/packages/multichain/src/scope/constants.ts @@ -0,0 +1,65 @@ +import MetaMaskOpenRPCDocument from '@metamask/api-specs'; + +import type
{ NonWalletKnownCaipNamespace } from './types'; + +/** + * ScopeStrings for offchain methods that are not specific to a chainId but are specific to a CAIP namespace. + */ +export enum KnownWalletScopeString { + Eip155 = 'wallet:eip155', +} + +/** + * Regexes defining how references must be formed for non-wallet known CAIP namespaces. + */ +export const CaipReferenceRegexes: Record<NonWalletKnownCaipNamespace, RegExp> = + { + eip155: /^(0|[1-9][0-9]*)$/u, + bip122: /.*/u, + }; + +/** + * Methods that do not belong exclusively to any CAIP namespace. + */ +export const KnownWalletRpcMethods: string[] = [ + 'wallet_registerOnboarding', + 'wallet_scanQRCode', +]; + +const WalletEip155Methods = ['wallet_addEthereumChain']; + +/** + * All MetaMask methods, except for ones we have specified in the constants above. + */ +const Eip155Methods = MetaMaskOpenRPCDocument.methods + .map(({ name }: { name: string }) => name) + .filter((method: string) => !WalletEip155Methods.includes(method)) + .filter((method: string) => !KnownWalletRpcMethods.includes(method)); + +/** + * Methods by ecosystem that are chain specific. + */ +export const KnownRpcMethods: Record<NonWalletKnownCaipNamespace, string[]> = { + eip155: Eip155Methods, + bip122: [], +}; + +/** + * Methods for CAIP namespaces that aren't chain specific. + */ +export const KnownWalletNamespaceRpcMethods: Record< + NonWalletKnownCaipNamespace, + string[] +> = { + eip155: WalletEip155Methods, + bip122: [], +}; + +/** + * Notifications for known CAIP namespaces. + */ +export const KnownNotifications: Record<NonWalletKnownCaipNamespace, string[]> = + { + eip155: ['eth_subscription'], + bip122: [], + }; diff --git a/packages/multichain/src/scope/errors.test.ts b/packages/multichain/src/scope/errors.test.ts new file mode 100644 index 0000000000..f176cd36d8 --- /dev/null +++ b/packages/multichain/src/scope/errors.test.ts @@ -0,0 +1,40 @@ +import { Caip25Errors } from './errors'; + +describe('Caip25Errors', () => { + it('requestedChainsNotSupportedError', () => { + expect(Caip25Errors.requestedChainsNotSupportedError().message).toBe( + 'Requested chains are not supported', + ); + expect(Caip25Errors.requestedChainsNotSupportedError().code).toBe(5100); + }); + + it('requestedMethodsNotSupportedError', () => { + expect(Caip25Errors.requestedMethodsNotSupportedError().message).toBe( + 'Requested methods are not supported', + ); + expect(Caip25Errors.requestedMethodsNotSupportedError().code).toBe(5101); + }); + + it('requestedNotificationsNotSupportedError', () => { + expect(Caip25Errors.requestedNotificationsNotSupportedError().message).toBe( + 'Requested notifications are not supported', + ); + expect(Caip25Errors.requestedNotificationsNotSupportedError().code).toBe( + 5102, + ); + }); + + it('unknownMethodsRequestedError', () => { + expect(Caip25Errors.unknownMethodsRequestedError().message).toBe( + 'Unknown method(s) requested', + ); + expect(Caip25Errors.unknownMethodsRequestedError().code).toBe(5201); + }); + + it('unknownNotificationsRequestedError', () => { + expect(Caip25Errors.unknownNotificationsRequestedError().message).toBe( + 'Unknown notification(s) requested', + ); + expect(Caip25Errors.unknownNotificationsRequestedError().code).toBe(5202); + }); +}); diff --git a/packages/multichain/src/scope/errors.ts b/packages/multichain/src/scope/errors.ts new file mode 100644 index 0000000000..97ff9c9872 --- /dev/null +++ b/packages/multichain/src/scope/errors.ts @@ -0,0 +1,48 @@ +import { JsonRpcError } from '@metamask/rpc-errors'; + +/** + * CAIP25 Errors.
+ */ +export const Caip25Errors = { + /** + * Thrown when chains requested in a CAIP-25 `wallet_createSession` call are not supported by the wallet. + * Defined in [CAIP-25 error codes section](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-25.md#trusted-failure-codes). + * @returns A new JsonRpcError instance. + */ + requestedChainsNotSupportedError: () => + new JsonRpcError(5100, 'Requested chains are not supported'), + + /** + * Thrown when methods requested in a CAIP-25 `wallet_createSession` call are not supported by the wallet. + * Defined in [CAIP-25 error codes section](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-25.md#trusted-failure-codes). + * TODO: consider throwing the more generic version of this error (UNKNOWN_METHODS_REQUESTED_ERROR) unless in a DevMode build of the wallet + * @returns A new JsonRpcError instance. + */ + requestedMethodsNotSupportedError: () => + new JsonRpcError(5101, 'Requested methods are not supported'), + + /** + * Thrown when notifications requested in a CAIP-25 `wallet_createSession` call are not supported by the wallet. + * Defined in [CAIP-25 error codes section](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-25.md#trusted-failure-codes). + * TODO: consider throwing the more generic version of this error (UNKNOWN_NOTIFICATIONS_REQUESTED_ERROR) unless in a DevMode build of the wallet + * @returns A new JsonRpcError instance. + */ + requestedNotificationsNotSupportedError: () => + new JsonRpcError(5102, 'Requested notifications are not supported'), + + /** + * Thrown when methods requested in a CAIP-25 `wallet_createSession` call are not supported by the wallet. + * Defined in [CAIP-25 error codes section](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-25.md#trusted-failure-codes). + * @returns A new JsonRpcError instance. + */ + unknownMethodsRequestedError: () => + new JsonRpcError(5201, 'Unknown method(s) requested'), + + /** + * Thrown when notifications requested in a CAIP-25 `wallet_createSession` call are not supported by the wallet. + * Defined in [CAIP-25 error codes section](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-25.md#trusted-failure-codes). + * @returns A new JsonRpcError instance. 
+ */ + unknownNotificationsRequestedError: () => + new JsonRpcError(5202, 'Unknown notification(s) requested'), +}; diff --git a/packages/multichain/src/scope/supported.test.ts b/packages/multichain/src/scope/supported.test.ts new file mode 100644 index 0000000000..4f431110b7 --- /dev/null +++ b/packages/multichain/src/scope/supported.test.ts @@ -0,0 +1,274 @@ +import { + KnownNotifications, + KnownRpcMethods, + KnownWalletNamespaceRpcMethods, + KnownWalletRpcMethods, +} from './constants'; +import { + isSupportedAccount, + isSupportedMethod, + isSupportedNotification, + isSupportedScopeString, +} from './supported'; + +describe('Scope Support', () => { + describe('isSupportedNotification', () => { + it.each(Object.entries(KnownNotifications))( + 'returns true for each %s scope method', + (scopeString: string, notifications: string[]) => { + notifications.forEach((notification) => { + expect(isSupportedNotification(scopeString, notification)).toBe(true); + }); + }, + ); + + it('returns false otherwise', () => { + expect(isSupportedNotification('eip155', 'anything else')).toBe(false); + expect(isSupportedNotification('', '')).toBe(false); + }); + + it('returns false for unknown namespaces', () => { + expect(isSupportedNotification('unknown', 'anything else')).toBe(false); + }); + + it('returns false for wallet namespace', () => { + expect(isSupportedNotification('wallet', 'anything else')).toBe(false); + }); + }); + + describe('isSupportedMethod', () => { + it.each(Object.entries(KnownRpcMethods))( + 'returns true for each %s scoped method', + (scopeString: string, methods: string[]) => { + methods.forEach((method) => { + expect(isSupportedMethod(scopeString, method)).toBe(true); + }); + }, + ); + + it('returns true for each wallet scoped method', () => { + KnownWalletRpcMethods.forEach((method) => { + expect(isSupportedMethod('wallet', method)).toBe(true); + }); + }); + + it.each(Object.entries(KnownWalletNamespaceRpcMethods))( + 'returns true for each wallet:%s scoped method', + (scopeString: string, methods: string[]) => { + methods.forEach((method) => { + expect(isSupportedMethod(`wallet:${scopeString}`, method)).toBe(true); + }); + }, + ); + + it('returns false otherwise', () => { + expect(isSupportedMethod('eip155', 'anything else')).toBe(false); + expect(isSupportedMethod('wallet:unknown', 'anything else')).toBe(false); + expect(isSupportedMethod('', '')).toBe(false); + }); + }); + + describe('isSupportedScopeString', () => { + it('returns true for the wallet namespace', () => { + expect(isSupportedScopeString('wallet', jest.fn())).toBe(true); + }); + + it('returns false for the wallet namespace when a reference is included', () => { + expect(isSupportedScopeString('wallet:someref', jest.fn())).toBe(false); + }); + + it('returns true for the ethereum namespace', () => { + expect(isSupportedScopeString('eip155', jest.fn())).toBe(true); + }); + + it('returns false for unknown namespaces', () => { + expect(isSupportedScopeString('unknown', jest.fn())).toBe(false); + }); + + it('returns true for the wallet namespace with eip155 reference', () => { + expect(isSupportedScopeString('wallet:eip155', jest.fn())).toBe(true); + }); + + it('returns false for the wallet namespace with eip155 reference', () => { + expect(isSupportedScopeString('wallet:eip155', jest.fn())).toBe(true); + }); + + it('returns true for the ethereum namespace when a network client exists for the reference', () => { + const isChainIdSupportedMock = jest.fn().mockReturnValue(true); + 
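// Note (descriptive comment, not part of the patch): per isSupportedScopeString
// in supported.ts, 'eip155' references must match /^(0|[1-9][0-9]*)$/u and are
// converted to hex via toHex before being passed to the injected
// isChainIdSupported predicate mocked here.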
expect(isSupportedScopeString('eip155:1', isChainIdSupportedMock)).toBe( + true, + ); + }); + + it('returns false for the ethereum namespace when a network client does not exist for the reference', () => { + const isChainIdSupportedMock = jest.fn().mockReturnValue(false); + expect(isSupportedScopeString('eip155:1', isChainIdSupportedMock)).toBe( + false, + ); + }); + + it('returns false for the ethereum namespace when the reference is malformed', () => { + const isChainIdSupportedMock = jest.fn().mockReturnValue(true); + expect(isSupportedScopeString('eip155:01', isChainIdSupportedMock)).toBe( + false, + ); + expect(isSupportedScopeString('eip155:1e1', isChainIdSupportedMock)).toBe( + false, + ); + }); + }); + + describe('isSupportedAccount', () => { + it('returns true if eoa account matching eip155 namespaced address exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:eoa', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('eip155:1:0xdeadbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns true if eoa account matching eip155 namespaced address with different casing exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:eoa', + address: '0xdeadBEEF', + }, + ]); + expect( + isSupportedAccount('eip155:1:0xDEADbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns true if erc4337 account matching eip155 namespaced address exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:erc4337', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('eip155:1:0xdeadbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns true if erc4337 account matching eip155 namespaced address with different casing exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:erc4337', + address: '0xdeadBEEF', + }, + ]); + expect( + isSupportedAccount('eip155:1:0xDEADbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns false if neither eoa or erc4337 account matching eip155 namespaced address exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'other', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('eip155:1:0xdeadbeef', getInternalAccounts), + ).toBe(false); + }); + + it('returns true if eoa account matching wallet:eip155 address exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:eoa', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('wallet:eip155:0xdeadbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns true if eoa account matching wallet:eip155 address with different casing exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:eoa', + address: '0xdeadBEEF', + }, + ]); + expect( + isSupportedAccount('wallet:eip155:0xDEADbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns true if erc4337 account matching wallet:eip155 address exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:erc4337', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('wallet:eip155:0xdeadbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns true if erc4337 account matching wallet:eip155 address with different casing exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:erc4337', + address: '0xdeadBEEF', + }, + ]); + 
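// Note (descriptive comment, not part of the patch): isSupportedAccount compares
// addresses case-insensitively and accepts both 'eip155:eoa' and 'eip155:erc4337'
// keyring account types, for both 'eip155:*' and 'wallet:eip155' scopes.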
expect( + isSupportedAccount('wallet:eip155:0xDEADbeef', getInternalAccounts), + ).toBe(true); + }); + + it('returns false if neither eoa or erc4337 account matching wallet:eip155 address exists', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'other', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('wallet:eip155:0xdeadbeef', getInternalAccounts), + ).toBe(false); + }); + + it('returns false if wallet namespace with unknown reference', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:eoa', + address: '0xdeadbeef', + }, + { + type: 'eip155:erc4337', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('wallet:foobar:0xdeadbeef', getInternalAccounts), + ).toBe(false); + }); + + it('returns false if unknown namespace', () => { + const getInternalAccounts = jest.fn().mockReturnValue([ + { + type: 'eip155:eoa', + address: '0xdeadbeef', + }, + { + type: 'eip155:erc4337', + address: '0xdeadbeef', + }, + ]); + expect( + isSupportedAccount('foo:bar:0xdeadbeef', getInternalAccounts), + ).toBe(false); + }); + }); +}); diff --git a/packages/multichain/src/scope/supported.ts b/packages/multichain/src/scope/supported.ts new file mode 100644 index 0000000000..e05e2c4dbf --- /dev/null +++ b/packages/multichain/src/scope/supported.ts @@ -0,0 +1,145 @@ +import { toHex, isEqualCaseInsensitive } from '@metamask/controller-utils'; +import type { CaipAccountId, Hex } from '@metamask/utils'; +import { KnownCaipNamespace, parseCaipAccountId } from '@metamask/utils'; + +import { + CaipReferenceRegexes, + KnownNotifications, + KnownRpcMethods, + KnownWalletNamespaceRpcMethods, + KnownWalletRpcMethods, +} from './constants'; +import type { ExternalScopeString } from './types'; +import { parseScopeString } from './types'; + +/** + * Determines if a scope string is supported. + * @param scopeString - The scope string to check. + * @param isChainIdSupported - A predicate that determines if a chainID is supported. + * @returns A boolean indicating if the scope string is supported. + */ +export const isSupportedScopeString = ( + scopeString: string, + isChainIdSupported: (chainId: Hex) => boolean, +) => { + const { namespace, reference } = parseScopeString(scopeString); + + switch (namespace) { + case KnownCaipNamespace.Wallet: + return !reference || reference === KnownCaipNamespace.Eip155; + case KnownCaipNamespace.Eip155: + return ( + !reference || + (CaipReferenceRegexes.eip155.test(reference) && + isChainIdSupported(toHex(reference))) + ); + default: + return false; + } +}; + +/** + * Determines if an account is supported by the wallet (i.e. on a keyring known to the wallet). + * @param account - The CAIP account ID to check. + * @param getInternalAccounts - A function that returns the internal accounts. + * @returns A boolean indicating if the account is supported by the wallet. + */ +export const isSupportedAccount = ( + account: CaipAccountId, + getInternalAccounts: () => { type: string; address: string }[], +) => { + const { + address, + chain: { namespace, reference }, + } = parseCaipAccountId(account); + + const isSupportedEip155Account = () => + getInternalAccounts().some( + (internalAccount) => + ['eip155:eoa', 'eip155:erc4337'].includes(internalAccount.type) && + isEqualCaseInsensitive(address, internalAccount.address), + ); + + switch (namespace) { + case KnownCaipNamespace.Wallet: + return reference === KnownCaipNamespace.Eip155 + ? 
isSupportedEip155Account() + : false; + case KnownCaipNamespace.Eip155: + return isSupportedEip155Account(); + default: + return false; + } +}; + +/** + * Determines if a method is supported by the wallet. + * @param scopeString - The scope string to check. + * @param method - The method to check. + * @returns A boolean indicating if the method is supported by the wallet. + */ +export const isSupportedMethod = ( + scopeString: ExternalScopeString, + method: string, +): boolean => { + const { namespace, reference } = parseScopeString(scopeString); + + if (!namespace || !isKnownCaipNamespace(namespace)) { + return false; + } + + if (namespace === KnownCaipNamespace.Wallet) { + if (reference) { + if ( + !isKnownCaipNamespace(reference) || + reference === KnownCaipNamespace.Wallet + ) { + return false; + } + return KnownWalletNamespaceRpcMethods[reference].includes(method); + } + + return KnownWalletRpcMethods.includes(method); + } + + return KnownRpcMethods[namespace].includes(method); +}; + +/** + * Determines if a notification is supported by the wallet. + * @param scopeString - The scope string to check. + * @param notification - The notification to check. + * @returns A boolean indicating if the notification is supported by the wallet. + */ +export const isSupportedNotification = ( + scopeString: ExternalScopeString, + notification: string, +): boolean => { + const { namespace } = parseScopeString(scopeString); + + if ( + !namespace || + !isKnownCaipNamespace(namespace) || + namespace === KnownCaipNamespace.Wallet + ) { + return false; + } + + return KnownNotifications[namespace].includes(notification); +}; + +/** + * Checks whether the given namespace is a known CAIP namespace. + * + * @param namespace - The namespace to check + * @returns Whether the given namespace is a known CAIP namespace. 
+ */ +function isKnownCaipNamespace( + namespace: string, +): namespace is KnownCaipNamespace { + const knownNamespaces = Object.keys(KnownCaipNamespace).map((key) => + key.toLowerCase(), + ); + + return knownNamespaces.includes(namespace); +} diff --git a/packages/multichain/src/scope/transform.test.ts b/packages/multichain/src/scope/transform.test.ts new file mode 100644 index 0000000000..b5e01b5cce --- /dev/null +++ b/packages/multichain/src/scope/transform.test.ts @@ -0,0 +1,380 @@ +import { + normalizeScope, + mergeScopes, + mergeScopeObject, + normalizeAndMergeScopes, +} from './transform'; +import type { ExternalScopeObject, NormalizedScopeObject } from './types'; + +const externalScopeObject: ExternalScopeObject = { + methods: [], + notifications: [], +}; + +const validScopeObject: NormalizedScopeObject = { + methods: [], + notifications: [], + accounts: [], +}; + +describe('Scope Transform', () => { + describe('normalizeScope', () => { + describe('scopeString is chain scoped', () => { + it('returns the scope with empty accounts array when accounts are not defined', () => { + expect(normalizeScope('eip155:1', externalScopeObject)).toStrictEqual({ + 'eip155:1': { + ...externalScopeObject, + accounts: [], + }, + }); + }); + + it('returns the scope unchanged when accounts are defined', () => { + expect( + normalizeScope('eip155:1', { ...externalScopeObject, accounts: [] }), + ).toStrictEqual({ + 'eip155:1': { + ...externalScopeObject, + accounts: [], + }, + }); + }); + }); + + describe('scopeString is namespace scoped', () => { + it('returns the scope as is when `references` is not defined', () => { + expect(normalizeScope('eip155', validScopeObject)).toStrictEqual({ + eip155: validScopeObject, + }); + }); + + it('returns one scope per `references` element with `references` excluded from the scopeObject', () => { + expect( + normalizeScope('eip155', { + ...validScopeObject, + references: ['1', '5', '64'], + }), + ).toStrictEqual({ + 'eip155:1': validScopeObject, + 'eip155:5': validScopeObject, + 'eip155:64': validScopeObject, + }); + }); + + it('returns one deep cloned scope per `references` element', () => { + const normalizedScopes = normalizeScope('eip155', { + ...validScopeObject, + references: ['1', '5'], + }); + + expect(normalizedScopes['eip155:1']).not.toBe( + normalizedScopes['eip155:5'], + ); + expect(normalizedScopes['eip155:1'].methods).not.toBe( + normalizedScopes['eip155:5'].methods, + ); + }); + + it('returns the scope as is when `references` is an empty array', () => { + expect( + normalizeScope('eip155', { ...validScopeObject, references: [] }), + ).toStrictEqual({ + eip155: validScopeObject, + }); + }); + }); + }); + + describe('mergeScopeObject', () => { + it('returns an object with the unique set of methods', () => { + expect( + mergeScopeObject( + { + ...validScopeObject, + methods: ['a', 'b', 'c'], + }, + { + ...validScopeObject, + methods: ['b', 'c', 'd'], + }, + ), + ).toStrictEqual({ + ...validScopeObject, + methods: ['a', 'b', 'c', 'd'], + }); + }); + + it('returns an object with the unique set of notifications', () => { + expect( + mergeScopeObject( + { + ...validScopeObject, + notifications: ['a', 'b', 'c'], + }, + { + ...validScopeObject, + notifications: ['b', 'c', 'd'], + }, + ), + ).toStrictEqual({ + ...validScopeObject, + notifications: ['a', 'b', 'c', 'd'], + }); + }); + + it('returns an object with the unique set of accounts', () => { + expect( + mergeScopeObject( + { + ...validScopeObject, + accounts: ['eip155:1:a', 'eip155:1:b', 'eip155:1:c'], + }, 
+ { + ...validScopeObject, + accounts: ['eip155:1:b', 'eip155:1:c', 'eip155:1:d'], + }, + ), + ).toStrictEqual({ + ...validScopeObject, + accounts: ['eip155:1:a', 'eip155:1:b', 'eip155:1:c', 'eip155:1:d'], + }); + + expect( + mergeScopeObject( + { + ...validScopeObject, + accounts: ['eip155:1:a', 'eip155:1:b', 'eip155:1:c'], + }, + { + ...validScopeObject, + }, + ), + ).toStrictEqual({ + ...validScopeObject, + accounts: ['eip155:1:a', 'eip155:1:b', 'eip155:1:c'], + }); + }); + + it('returns an object with the unique set of rpcDocuments', () => { + expect( + mergeScopeObject( + { + ...validScopeObject, + rpcDocuments: ['a', 'b', 'c'], + }, + { + ...validScopeObject, + rpcDocuments: ['b', 'c', 'd'], + }, + ), + ).toStrictEqual({ + ...validScopeObject, + rpcDocuments: ['a', 'b', 'c', 'd'], + }); + + expect( + mergeScopeObject( + { + ...validScopeObject, + rpcDocuments: ['a', 'b', 'c'], + }, + { + ...validScopeObject, + }, + ), + ).toStrictEqual({ + ...validScopeObject, + rpcDocuments: ['a', 'b', 'c'], + }); + + expect( + mergeScopeObject( + { + ...validScopeObject, + }, + { + ...validScopeObject, + rpcDocuments: ['a', 'b', 'c'], + }, + ), + ).toStrictEqual({ + ...validScopeObject, + rpcDocuments: ['a', 'b', 'c'], + }); + }); + + it('returns an object with the unique set of rpcEndpoints', () => { + expect( + mergeScopeObject( + { + ...validScopeObject, + rpcEndpoints: ['a', 'b', 'c'], + }, + { + ...validScopeObject, + rpcEndpoints: ['b', 'c', 'd'], + }, + ), + ).toStrictEqual({ + ...validScopeObject, + rpcEndpoints: ['a', 'b', 'c', 'd'], + }); + + expect( + mergeScopeObject( + { + ...validScopeObject, + rpcEndpoints: ['a', 'b', 'c'], + }, + { + ...validScopeObject, + }, + ), + ).toStrictEqual({ + ...validScopeObject, + rpcEndpoints: ['a', 'b', 'c'], + }); + + expect( + mergeScopeObject( + { + ...validScopeObject, + }, + { + ...validScopeObject, + rpcEndpoints: ['a', 'b', 'c'], + }, + ), + ).toStrictEqual({ + ...validScopeObject, + rpcEndpoints: ['a', 'b', 'c'], + }); + }); + }); + + describe('mergeScopes', () => { + it('merges the scopeObjects with matching scopeString', () => { + expect( + mergeScopes( + { + 'eip155:1': { + methods: ['a', 'b', 'c'], + notifications: ['foo'], + accounts: [], + }, + }, + { + 'eip155:1': { + methods: ['c', 'd'], + notifications: ['bar'], + accounts: [], + }, + }, + ), + ).toStrictEqual({ + 'eip155:1': { + methods: ['a', 'b', 'c', 'd'], + notifications: ['foo', 'bar'], + accounts: [], + }, + }); + }); + + it('preserves the scopeObjects with no matching scopeString', () => { + expect( + mergeScopes( + { + 'eip155:1': { + methods: ['a', 'b', 'c'], + notifications: ['foo'], + accounts: [], + }, + }, + { + 'eip155:2': { + methods: ['c', 'd'], + notifications: ['bar'], + accounts: [], + }, + 'eip155:3': { + methods: [], + notifications: [], + accounts: [], + }, + }, + ), + ).toStrictEqual({ + 'eip155:1': { + methods: ['a', 'b', 'c'], + notifications: ['foo'], + accounts: [], + }, + 'eip155:2': { + methods: ['c', 'd'], + notifications: ['bar'], + accounts: [], + }, + 'eip155:3': { + methods: [], + notifications: [], + accounts: [], + }, + }); + }); + it('returns an empty object when no scopes are provided', () => { + expect(mergeScopes({}, {})).toStrictEqual({}); + }); + + it('returns an unchanged scope when two identical scopeObjects are provided', () => { + expect( + mergeScopes( + { 'eip155:1': validScopeObject }, + { 'eip155:1': validScopeObject }, + ), + ).toStrictEqual({ 'eip155:1': validScopeObject }); + }); + }); + + describe('normalizeAndMergeScopes', () => { 
+ it('normalizes scopes and merges any overlapping scopeStrings', () => { + expect( + normalizeAndMergeScopes({ + eip155: { + ...validScopeObject, + methods: ['a', 'b'], + references: ['1', '5'], + }, + 'eip155:1': { + ...validScopeObject, + methods: ['b', 'c', 'd'], + }, + }), + ).toStrictEqual({ + 'eip155:1': { + ...validScopeObject, + methods: ['a', 'b', 'c', 'd'], + }, + 'eip155:5': { + ...validScopeObject, + methods: ['a', 'b'], + }, + }); + }); + it('returns an empty object when no scopes are provided', () => { + expect(normalizeAndMergeScopes({})).toStrictEqual({}); + }); + it('return an unchanged scope when scopeObjects are already normalized (i.e. none contain references to flatten)', () => { + expect( + normalizeAndMergeScopes({ + 'eip155:1': validScopeObject, + 'eip155:2': validScopeObject, + 'eip155:3': validScopeObject, + }), + ).toStrictEqual({ + 'eip155:1': validScopeObject, + 'eip155:2': validScopeObject, + 'eip155:3': validScopeObject, + }); + }); + }); +}); diff --git a/packages/multichain/src/scope/transform.ts b/packages/multichain/src/scope/transform.ts new file mode 100644 index 0000000000..666ff740eb --- /dev/null +++ b/packages/multichain/src/scope/transform.ts @@ -0,0 +1,152 @@ +import type { CaipReference } from '@metamask/utils'; +import { cloneDeep } from 'lodash'; + +import type { + ExternalScopeObject, + ExternalScopesObject, + NormalizedScopeObject, + NormalizedScopesObject, +} from './types'; +import { parseScopeString } from './types'; + +/** + * Returns a list of unique items + * + * @param list - The list of items to filter + * @returns A list of unique items + */ +export const getUniqueArrayItems = (list: Value[]): Value[] => { + return Array.from(new Set(list)); +}; + +/** + * Normalizes a ScopeString and ExternalScopeObject into a separate + * InternalScopeString and NormalizedScopeObject for each reference in the `references` + * value if defined and adds an empty `accounts` array if not defined. + * + * @param scopeString - The string representing the scope + * @param externalScopeObject - The object that defines the scope + * @returns a map of caipChainId to ScopeObjects + */ +export const normalizeScope = ( + scopeString: string, + externalScopeObject: ExternalScopeObject, +): NormalizedScopesObject => { + const { references, ...scopeObject } = externalScopeObject; + const { namespace, reference } = parseScopeString(scopeString); + + const normalizedScopeObject: NormalizedScopeObject = { + accounts: [], + ...scopeObject, + }; + + const shouldFlatten = + namespace && + !reference && + references !== undefined && + references.length > 0; + + if (shouldFlatten) { + return Object.fromEntries( + references.map((ref: CaipReference) => [ + `${namespace}:${ref}`, + cloneDeep(normalizedScopeObject), + ]), + ); + } + return { [scopeString]: normalizedScopeObject }; +}; + +/** + * Merges two NormalizedScopeObjects + * @param scopeObjectA - The first scope object to merge. + * @param scopeObjectB - The second scope object to merge. + * @returns The merged scope object. 
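
To make the flattening performed by `normalizeScope` concrete, a small sketch follows (method names are arbitrary; the import path is assumed):

```ts
// Assumed import path; normalizeScope is defined in packages/multichain/src/scope/transform.ts.
import { normalizeScope } from '@metamask/multichain';

// A namespace-scoped entry with `references` becomes one chain-scoped entry per
// reference, each deep-cloned and given an empty `accounts` array.
normalizeScope('eip155', {
  methods: ['eth_chainId'],
  notifications: [],
  references: ['1', '137'],
});
// => {
//   'eip155:1':   { methods: ['eth_chainId'], notifications: [], accounts: [] },
//   'eip155:137': { methods: ['eth_chainId'], notifications: [], accounts: [] },
// }

// A chain-scoped entry passes through unchanged, gaining only the `accounts` default.
normalizeScope('eip155:1', { methods: [], notifications: [] });
// => { 'eip155:1': { methods: [], notifications: [], accounts: [] } }
```
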
+ */ +export const mergeScopeObject = ( + scopeObjectA: NormalizedScopeObject, + scopeObjectB: NormalizedScopeObject, +) => { + const mergedScopeObject: NormalizedScopeObject = { + methods: getUniqueArrayItems([ + ...scopeObjectA.methods, + ...scopeObjectB.methods, + ]), + notifications: getUniqueArrayItems([ + ...scopeObjectA.notifications, + ...scopeObjectB.notifications, + ]), + accounts: getUniqueArrayItems([ + ...scopeObjectA.accounts, + ...scopeObjectB.accounts, + ]), + }; + + if (scopeObjectA.rpcDocuments || scopeObjectB.rpcDocuments) { + mergedScopeObject.rpcDocuments = getUniqueArrayItems([ + ...(scopeObjectA.rpcDocuments ?? []), + ...(scopeObjectB.rpcDocuments ?? []), + ]); + } + + if (scopeObjectA.rpcEndpoints || scopeObjectB.rpcEndpoints) { + mergedScopeObject.rpcEndpoints = getUniqueArrayItems([ + ...(scopeObjectA.rpcEndpoints ?? []), + ...(scopeObjectB.rpcEndpoints ?? []), + ]); + } + + return mergedScopeObject; +}; + +/** + * Merges two NormalizedScopeObjects + * @param scopeA - The first scope object to merge. + * @param scopeB - The second scope object to merge. + * @returns The merged scope object. + */ +export const mergeScopes = ( + scopeA: NormalizedScopesObject, + scopeB: NormalizedScopesObject, +): NormalizedScopesObject => { + const scope: NormalizedScopesObject = {}; + + Object.entries(scopeA).forEach(([_scopeString, scopeObjectA]) => { + // Cast needed because index type is returned as `string` by `Object.entries` + const scopeString = _scopeString as keyof typeof scopeA; + const scopeObjectB = scopeB[scopeString]; + + scope[scopeString] = scopeObjectB + ? mergeScopeObject(scopeObjectA, scopeObjectB) + : scopeObjectA; + }); + + Object.entries(scopeB).forEach(([_scopeString, scopeObjectB]) => { + // Cast needed because index type is returned as `string` by `Object.entries` + const scopeString = _scopeString as keyof typeof scopeB; + const scopeObjectA = scopeA[scopeString]; + + if (!scopeObjectA) { + scope[scopeString] = scopeObjectB; + } + }); + + return scope; +}; + +/** + * Normalizes and merges a set of ExternalScopesObjects into a NormalizedScopesObject (i.e. a set of NormalizedScopeObjects where references are flattened). + * @param scopes - The external scopes to normalize and merge. + * @returns The normalized and merged scopes. 
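
A quick sketch of `mergeScopes`: entries with the same scope string are unioned field by field, and everything else is carried over unchanged (the values below are arbitrary; the import path is assumed).

```ts
import { mergeScopes } from '@metamask/multichain'; // assumed import path

mergeScopes(
  {
    'eip155:1': {
      methods: ['eth_chainId'],
      notifications: [],
      accounts: ['eip155:1:0xabc'],
    },
  },
  {
    'eip155:1': {
      methods: ['eth_call'],
      notifications: ['eth_subscription'],
      accounts: ['eip155:1:0xabc'],
    },
    'eip155:137': { methods: [], notifications: [], accounts: [] },
  },
);
// => {
//   'eip155:1': {
//     methods: ['eth_chainId', 'eth_call'],
//     notifications: ['eth_subscription'],
//     accounts: ['eip155:1:0xabc'],
//   },
//   'eip155:137': { methods: [], notifications: [], accounts: [] },
// }
```
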
+ */ +export const normalizeAndMergeScopes = ( + scopes: ExternalScopesObject, +): NormalizedScopesObject => { + let mergedScopes: NormalizedScopesObject = {}; + Object.keys(scopes).forEach((scopeString) => { + const normalizedScopes = normalizeScope(scopeString, scopes[scopeString]); + mergedScopes = mergeScopes(mergedScopes, normalizedScopes); + }); + + return mergedScopes; +}; diff --git a/packages/multichain/src/scope/types.test.ts b/packages/multichain/src/scope/types.test.ts new file mode 100644 index 0000000000..1b6149b3f2 --- /dev/null +++ b/packages/multichain/src/scope/types.test.ts @@ -0,0 +1,23 @@ +import { parseScopeString } from './types'; + +describe('Scope', () => { + describe('parseScopeString', () => { + it('returns only the namespace if scopeString is namespace', () => { + expect(parseScopeString('abc')).toStrictEqual({ namespace: 'abc' }); + }); + + it('returns the namespace and reference if scopeString is a CAIP chain ID', () => { + expect(parseScopeString('abc:foo')).toStrictEqual({ + namespace: 'abc', + reference: 'foo', + }); + }); + + it('returns empty object if scopeString is invalid', () => { + expect(parseScopeString('')).toStrictEqual({}); + expect(parseScopeString('a:')).toStrictEqual({}); + expect(parseScopeString(':b')).toStrictEqual({}); + expect(parseScopeString('a:b:c')).toStrictEqual({}); + }); + }); +}); diff --git a/packages/multichain/src/scope/types.ts b/packages/multichain/src/scope/types.ts new file mode 100644 index 0000000000..f639c58d2e --- /dev/null +++ b/packages/multichain/src/scope/types.ts @@ -0,0 +1,119 @@ +import { + isCaipNamespace, + isCaipChainId, + parseCaipChainId, +} from '@metamask/utils'; +import type { + CaipChainId, + CaipReference, + CaipAccountId, + KnownCaipNamespace, + CaipNamespace, + Json, +} from '@metamask/utils'; + +/** + * Represents a `scopeString` as defined in [CAIP-217](https://chainagnostic.org/CAIPs/caip-217). + */ +export type ExternalScopeString = CaipChainId | CaipNamespace; +/** + * Represents a `scopeObject` as defined in [CAIP-217](https://chainagnostic.org/CAIPs/caip-217). + */ +export type ExternalScopeObject = Omit & { + references?: CaipReference[]; + accounts?: CaipAccountId[]; +}; +/** + * Represents a `scope` as defined in [CAIP-217](https://chainagnostic.org/CAIPs/caip-217). + * TODO update the language in CAIP-217 to use "scope" instead of "scopeObject" for this full record type. + */ +export type ExternalScopesObject = Record< + ExternalScopeString, + ExternalScopeObject +>; + +/** + * Represents a `scopeString` as defined in + * [CAIP-217](https://chainagnostic.org/CAIPs/caip-217), with the exception that + * CAIP namespaces without a reference (aside from "wallet") are disallowed for our internal representations of CAIP-25 session scopes + */ +export type InternalScopeString = CaipChainId | KnownCaipNamespace.Wallet; + +/** + * A trimmed down version of a [CAIP-217](https://chainagnostic.org/CAIPs/caip-217) defined scopeObject that is stored in a `endowment:caip25` permission. + * The only property from the original CAIP-25 scopeObject that we use for permissioning is `accounts`. + */ +export type InternalScopeObject = { + accounts: CaipAccountId[]; +}; + +/** + * A trimmed down version of a [CAIP-217](https://chainagnostic.org/CAIPs/caip-217) scope that is stored in a `endowment:caip25` permission. + * Accounts arrays are mapped to CAIP-2 chainIds. These are currently the only properties used by the permission system. 
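
Pulling the two transform steps together, `normalizeAndMergeScopes` (defined above in `transform.ts`) flattens namespace-scoped entries and then merges any resulting overlaps, mirroring the transform tests earlier in this diff (import path assumed):

```ts
import { normalizeAndMergeScopes } from '@metamask/multichain'; // assumed import path

normalizeAndMergeScopes({
  eip155: {
    methods: ['eth_chainId'],
    notifications: [],
    references: ['1', '137'],
  },
  'eip155:1': {
    methods: ['personal_sign'],
    notifications: [],
  },
});
// => {
//   'eip155:1':   { methods: ['eth_chainId', 'personal_sign'], notifications: [], accounts: [] },
//   'eip155:137': { methods: ['eth_chainId'], notifications: [], accounts: [] },
// }
```
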
+ */ +export type InternalScopesObject = Record & { + [KnownCaipNamespace.Wallet]?: InternalScopeObject; +}; + +/** + * Represents a `scopeObject` as defined in + * [CAIP-217](https://chainagnostic.org/CAIPs/caip-217), with the exception that + * we resolve the `references` property into a scopeObject per reference and + * assign an empty array to the `accounts` property if not already defined + * to more easily read chain specific permissions. + */ +export type NormalizedScopeObject = { + methods: string[]; + notifications: string[]; + accounts: CaipAccountId[]; + rpcDocuments?: string[]; + rpcEndpoints?: string[]; +}; +/** + * Represents a keyed `scopeObject` as defined in + * [CAIP-217](https://chainagnostic.org/CAIPs/caip-217), with the exception that + * we resolve the `references` property into a scopeObject per reference and + * assign an empty array to the `accounts` property if not already defined + * to more easily read chain specific permissions. + */ +export type NormalizedScopesObject = Record< + CaipChainId, + NormalizedScopeObject +> & { + [KnownCaipNamespace.Wallet]?: NormalizedScopeObject; +}; + +export type ScopedProperties = Record> & { + [KnownCaipNamespace.Wallet]?: Record; +}; + +/** + * Parses a scope string into a namespace and reference. + * @param scopeString - The scope string to parse. + * @returns An object containing the namespace and reference. + */ +export const parseScopeString = ( + scopeString: string, +): { + namespace?: string; + reference?: string; +} => { + if (isCaipNamespace(scopeString)) { + return { + namespace: scopeString, + }; + } + if (isCaipChainId(scopeString)) { + return parseCaipChainId(scopeString); + } + + return {}; +}; + +/** + * CAIP namespaces excluding "wallet" currently supported by/known to the wallet. 
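
For reference, `parseScopeString` only ever yields a namespace, a namespace/reference pair, or an empty object, and `InternalScopesObject` is the trimmed shape stored in the `endowment:caip25` permission. The sketch below uses placeholder addresses and an assumed import path.

```ts
import { parseScopeString, type InternalScopesObject } from '@metamask/multichain'; // assumed import path

parseScopeString('eip155');   // => { namespace: 'eip155' }
parseScopeString('eip155:1'); // => { namespace: 'eip155', reference: '1' }
parseScopeString('a:b:c');    // => {} (neither a CAIP namespace nor a CAIP chain ID)

// Only `accounts` survives into the permission's internal representation.
const sessionScopes: InternalScopesObject = {
  'eip155:1': { accounts: ['eip155:1:0xdeadbeef'] },
  'eip155:137': { accounts: [] },
  wallet: { accounts: [] },
};
```
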
+ */ +export type NonWalletKnownCaipNamespace = Exclude< + KnownCaipNamespace, + KnownCaipNamespace.Wallet +>; diff --git a/packages/multichain/src/scope/validation.test.ts b/packages/multichain/src/scope/validation.test.ts new file mode 100644 index 0000000000..6871b01069 --- /dev/null +++ b/packages/multichain/src/scope/validation.test.ts @@ -0,0 +1,179 @@ +import type { ExternalScopeObject } from './types'; +import { isValidScope, getValidScopes } from './validation'; + +const validScopeString = 'eip155:1'; +const validScopeObject: ExternalScopeObject = { + methods: [], + notifications: [], +}; + +describe('Scope Validation', () => { + describe('isValidScope', () => { + it('returns false when the scopeString is neither a CAIP namespace or CAIP chainId', () => { + expect( + isValidScope('not a namespace or a caip chain id', validScopeObject), + ).toBe(false); + }); + + it('returns true when the scopeString is "wallet" and the scopeObject does not contain references', () => { + expect(isValidScope('wallet', validScopeObject)).toBe(true); + }); + + it('returns true when the scopeString is a valid CAIP chainId and the scopeObject is valid', () => { + expect(isValidScope('eip155:1', validScopeObject)).toBe(true); + }); + + it('returns false when the scopeString is a valid CAIP namespace but references are invalid CAIP references', () => { + expect( + isValidScope('eip155', { + ...validScopeObject, + references: ['@'], + }), + ).toBe(false); + }); + + it('returns false when the scopeString is a CAIP chainId but references is defined', () => { + expect( + isValidScope('eip155:1', { + ...validScopeObject, + references: [], + }), + ).toBe(false); + }); + + it('returns false when the scopeString is a valid CAIP namespace (other than "wallet") but references is an empty array', () => { + expect( + isValidScope('eip155', { ...validScopeObject, references: [] }), + ).toBe(false); + }); + + it('returns false when the scopeString is a valid CAIP namespace (other than "wallet") but references is undefined', () => { + expect(isValidScope('eip155', validScopeObject)).toBe(false); + }); + + it('returns false when methods contains empty string', () => { + expect( + isValidScope(validScopeString, { + ...validScopeObject, + methods: [''], + }), + ).toBe(false); + }); + + it('returns false when methods contains non-string', () => { + expect( + isValidScope(validScopeString, { + ...validScopeObject, + // @ts-expect-error Intentionally invalid input + methods: [{ foo: 'bar' }], + }), + ).toBe(false); + }); + + it('returns true when methods contains only strings', () => { + expect( + isValidScope(validScopeString, { + ...validScopeObject, + methods: ['method1', 'method2'], + }), + ).toBe(true); + }); + + it('returns false when notifications contains empty string', () => { + expect( + isValidScope(validScopeString, { + ...validScopeObject, + notifications: [''], + }), + ).toBe(false); + }); + + it('returns false when notifications contains non-string', () => { + expect( + isValidScope(validScopeString, { + ...validScopeObject, + // @ts-expect-error Intentionally invalid input + notifications: [{ foo: 'bar' }], + }), + ).toBe(false); + }); + + it('returns false when unexpected properties are defined', () => { + expect( + isValidScope(validScopeString, { + ...validScopeObject, + // @ts-expect-error Intentionally invalid input + unexpectedParam: 'foobar', + }), + ).toBe(false); + }); + + it('returns true when only expected properties are defined', () => { + expect( + isValidScope(validScopeString, { + methods: [], + 
notifications: [], + accounts: [], + rpcDocuments: [], + rpcEndpoints: [], + }), + ).toBe(true); + + expect( + isValidScope('eip155', { + ...validScopeObject, + references: ['1'], + }), + ).toBe(true); + }); + }); + + describe('getValidScopes', () => { + const validScopeObjectWithAccounts = { + ...validScopeObject, + accounts: [], + }; + + it('does not throw an error if required scopes are defined but none are valid', () => { + expect( + getValidScopes( + // @ts-expect-error Intentionally invalid input + { 'eip155:1': {} }, + undefined, + ), + ).toStrictEqual({ validRequiredScopes: {}, validOptionalScopes: {} }); + }); + + it('does not throw an error if optional scopes are defined but none are valid', () => { + expect( + getValidScopes(undefined, { + // @ts-expect-error Intentionally invalid input + 'eip155:1': {}, + }), + ).toStrictEqual({ validRequiredScopes: {}, validOptionalScopes: {} }); + }); + + it('returns the valid required and optional scopes', () => { + expect( + getValidScopes( + { + 'eip155:1': validScopeObjectWithAccounts, + // @ts-expect-error Intentionally invalid input + 'eip155:64': {}, + }, + { + 'eip155:2': {}, + 'eip155:5': validScopeObjectWithAccounts, + }, + ), + ).toStrictEqual({ + validRequiredScopes: { + 'eip155:1': validScopeObjectWithAccounts, + }, + validOptionalScopes: { + 'eip155:5': validScopeObjectWithAccounts, + }, + }); + }); + }); +}); diff --git a/packages/multichain/src/scope/validation.ts b/packages/multichain/src/scope/validation.ts new file mode 100644 index 0000000000..26e96fdc65 --- /dev/null +++ b/packages/multichain/src/scope/validation.ts @@ -0,0 +1,129 @@ +import { isCaipReference } from '@metamask/utils'; + +import type { + ExternalScopeString, + ExternalScopeObject, + ExternalScopesObject, +} from './types'; +import { parseScopeString } from './types'; + +/** + * Validates a scope object according to the [CAIP-217](https://chainagnostic.org/CAIPs/caip-217) spec. + * @param scopeString - The scope string to validate. + * @param scopeObject - The scope object to validate. + * @returns A boolean indicating if the scope object is valid according to the [CAIP-217](https://chainagnostic.org/CAIPs/caip-217) spec. 
+ */ +export const isValidScope = ( + scopeString: ExternalScopeString, + scopeObject: ExternalScopeObject, +): boolean => { + const { namespace, reference } = parseScopeString(scopeString); + + // Namespace is required + if (!namespace) { + return false; + } + + const { + references, + methods, + notifications, + accounts, + rpcDocuments, + rpcEndpoints, + ...extraProperties + } = scopeObject; + + // Methods and notifications are required + if (!methods || !notifications) { + return false; + } + + // For namespaces other than 'wallet', either reference or non-empty references array must be present + if ( + namespace !== 'wallet' && + !reference && + (!references || references.length === 0) + ) { + return false; + } + + // If references are present, reference must be absent and all references must be valid + if (references) { + if (reference) { + return false; + } + + const areReferencesValid = references.every((nestedReference) => + isCaipReference(nestedReference), + ); + + if (!areReferencesValid) { + return false; + } + } + + const areMethodsValid = methods.every( + (method) => typeof method === 'string' && method.trim() !== '', + ); + + if (!areMethodsValid) { + return false; + } + + const areNotificationsValid = notifications.every( + (notification) => + typeof notification === 'string' && notification.trim() !== '', + ); + + if (!areNotificationsValid) { + return false; + } + + // Ensure no unexpected properties are present in the scope object + if (Object.keys(extraProperties).length > 0) { + return false; + } + + return true; +}; + +/** + * Filters out invalid scopes and returns valid sets of required and optional scopes according to the [CAIP-217](https://chainagnostic.org/CAIPs/caip-217) spec. + * @param requiredScopes - The required scopes to validate. + * @param optionalScopes - The optional scopes to validate. + * @returns An object containing valid required scopes and optional scopes. 
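
A short sketch of how the two validation helpers behave: `isValidScope` is a pure predicate, while `getValidScopes` silently drops invalid entries and backfills an empty `accounts` array on the survivors (import path assumed; values arbitrary).

```ts
import { isValidScope, getValidScopes } from '@metamask/multichain'; // assumed import path

isValidScope('eip155:1', { methods: ['eth_call'], notifications: [] }); // true
isValidScope('eip155', { methods: [], notifications: [] }); // false (namespace scope without references)
isValidScope('wallet', { methods: [], notifications: [] }); // true

getValidScopes(
  {
    'eip155:1': { methods: ['eth_call'], notifications: [] },
    // Invalid: a chain-scoped string must not also carry `references`.
    'eip155:5': { methods: [], notifications: [], references: [] },
  },
  undefined,
);
// => {
//   validRequiredScopes: {
//     'eip155:1': { accounts: [], methods: ['eth_call'], notifications: [] },
//   },
//   validOptionalScopes: {},
// }
```
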
+ */ +export const getValidScopes = ( + requiredScopes?: ExternalScopesObject, + optionalScopes?: ExternalScopesObject, +) => { + const validRequiredScopes: ExternalScopesObject = {}; + for (const [scopeString, scopeObject] of Object.entries( + requiredScopes || {}, + )) { + if (isValidScope(scopeString, scopeObject)) { + validRequiredScopes[scopeString] = { + accounts: [], + ...scopeObject, + }; + } + } + + const validOptionalScopes: ExternalScopesObject = {}; + for (const [scopeString, scopeObject] of Object.entries( + optionalScopes || {}, + )) { + if (isValidScope(scopeString, scopeObject)) { + validOptionalScopes[scopeString] = { + accounts: [], + ...scopeObject, + }; + } + } + + return { + validRequiredScopes, + validOptionalScopes, + }; +}; diff --git a/packages/multichain/tsconfig.build.json b/packages/multichain/tsconfig.build.json index 02a0eea03f..f2108df276 100644 --- a/packages/multichain/tsconfig.build.json +++ b/packages/multichain/tsconfig.build.json @@ -3,8 +3,16 @@ "compilerOptions": { "baseUrl": "./", "outDir": "./dist", - "rootDir": "./src" + "rootDir": "./src", + "resolveJsonModule": true }, - "references": [], + "references": [ + { + "path": "../network-controller/tsconfig.build.json" + }, + { + "path": "../permission-controller/tsconfig.build.json" + } + ], "include": ["../../types", "./src"] } diff --git a/packages/multichain/tsconfig.json b/packages/multichain/tsconfig.json index 025ba2ef7f..34e1d4a721 100644 --- a/packages/multichain/tsconfig.json +++ b/packages/multichain/tsconfig.json @@ -3,6 +3,13 @@ "compilerOptions": { "baseUrl": "./" }, - "references": [], + "references": [ + { + "path": "../network-controller" + }, + { + "path": "../permission-controller" + } + ], "include": ["../../types", "./src"] } diff --git a/packages/notification-services-controller/CHANGELOG.md b/packages/notification-services-controller/CHANGELOG.md index 5e2aa5da30..c9113b21d4 100644 --- a/packages/notification-services-controller/CHANGELOG.md +++ b/packages/notification-services-controller/CHANGELOG.md @@ -7,6 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.14.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-controller` peer dependency from `^18.0.0` to `^19.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) +- **BREAKING:** Bump `@metamask/profile-sync-controller` peer dependency from `^1.0.0` to `^2.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) + ## [0.13.0] ### Changed @@ -247,7 +254,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Initial release -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/notification-services-controller@0.13.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/notification-services-controller@0.14.0...HEAD +[0.14.0]: https://github.com/MetaMask/core/compare/@metamask/notification-services-controller@0.13.0...@metamask/notification-services-controller@0.14.0 [0.13.0]: https://github.com/MetaMask/core/compare/@metamask/notification-services-controller@0.12.1...@metamask/notification-services-controller@0.13.0 [0.12.1]: https://github.com/MetaMask/core/compare/@metamask/notification-services-controller@0.12.0...@metamask/notification-services-controller@0.12.1 [0.12.0]: https://github.com/MetaMask/core/compare/@metamask/notification-services-controller@0.11.0...@metamask/notification-services-controller@0.12.0 diff --git a/packages/notification-services-controller/package.json 
b/packages/notification-services-controller/package.json index f80f8d0a84..9dc69409ce 100644 --- a/packages/notification-services-controller/package.json +++ b/packages/notification-services-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/notification-services-controller", - "version": "0.13.0", + "version": "0.14.0", "description": "Manages New MetaMask decentralized Notification system", "keywords": [ "MetaMask", @@ -111,8 +111,8 @@ "devDependencies": { "@lavamoat/allow-scripts": "^3.0.4", "@metamask/auto-changelog": "^3.4.4", - "@metamask/keyring-controller": "^18.0.0", - "@metamask/profile-sync-controller": "^1.0.0", + "@metamask/keyring-controller": "^19.0.0", + "@metamask/profile-sync-controller": "^2.0.0", "@types/jest": "^27.4.1", "@types/readable-stream": "^2.3.0", "contentful": "^10.15.0", @@ -126,8 +126,8 @@ "typescript": "~5.2.2" }, "peerDependencies": { - "@metamask/keyring-controller": "^18.0.0", - "@metamask/profile-sync-controller": "^1.0.0" + "@metamask/keyring-controller": "^19.0.0", + "@metamask/profile-sync-controller": "^2.0.0" }, "engines": { "node": "^18.18 || >=20" diff --git a/packages/preferences-controller/CHANGELOG.md b/packages/preferences-controller/CHANGELOG.md index c1b301a341..6b4b45aa67 100644 --- a/packages/preferences-controller/CHANGELOG.md +++ b/packages/preferences-controller/CHANGELOG.md @@ -7,6 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [15.0.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-controller` peer dependency from `^18.0.0` to `^19.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) + ## [14.0.0] ### Changed @@ -321,7 +327,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 All changes listed after this point were applied to this package following the monorepo conversion. 
-[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/preferences-controller@14.0.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/preferences-controller@15.0.0...HEAD +[15.0.0]: https://github.com/MetaMask/core/compare/@metamask/preferences-controller@14.0.0...@metamask/preferences-controller@15.0.0 [14.0.0]: https://github.com/MetaMask/core/compare/@metamask/preferences-controller@13.3.0...@metamask/preferences-controller@14.0.0 [13.3.0]: https://github.com/MetaMask/core/compare/@metamask/preferences-controller@13.2.0...@metamask/preferences-controller@13.3.0 [13.2.0]: https://github.com/MetaMask/core/compare/@metamask/preferences-controller@13.1.0...@metamask/preferences-controller@13.2.0 diff --git a/packages/preferences-controller/package.json b/packages/preferences-controller/package.json index bcebd59c43..93f366a8e3 100644 --- a/packages/preferences-controller/package.json +++ b/packages/preferences-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/preferences-controller", - "version": "14.0.0", + "version": "15.0.0", "description": "Manages user-configurable settings for MetaMask", "keywords": [ "MetaMask", @@ -52,7 +52,7 @@ }, "devDependencies": { "@metamask/auto-changelog": "^3.4.4", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-controller": "^19.0.0", "@types/jest": "^27.4.1", "deepmerge": "^4.2.2", "jest": "^27.5.1", @@ -63,7 +63,7 @@ "typescript": "~5.2.2" }, "peerDependencies": { - "@metamask/keyring-controller": "^18.0.0" + "@metamask/keyring-controller": "^19.0.0" }, "engines": { "node": "^18.18 || >=20" diff --git a/packages/profile-sync-controller/CHANGELOG.md b/packages/profile-sync-controller/CHANGELOG.md index c20e028436..9129585fc1 100644 --- a/packages/profile-sync-controller/CHANGELOG.md +++ b/packages/profile-sync-controller/CHANGELOG.md @@ -7,6 +7,39 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [2.0.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-controller` peer dependency from `^18.0.0` to `^19.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) +- **BREAKING:** Bump `@metamask/accounts-controller` peer dependency from `^19.0.0` to `^20.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) + +## [1.0.2] + +### Added + +- new analytics callback and various helpers & improvements ([#4944](https://github.com/MetaMask/core/pull/4944)) + - new `UserStorageController` state keys: `hasAccountSyncingSyncedAtLeastOnce` and `isAccountSyncingReadyToBeDispatched` + - new `onAccountSyncErroneousSituation` analytics callback to track how often erroneous situations happen during account syncing + +### Changed + +- set `hasAccountSyncingSyncedAtLeastOnce` also for a profile id that has never synced accounts before ([#4944](https://github.com/MetaMask/core/pull/4944)) + +## [1.0.1] + +### Added + +- add batch delete endpoint support for both UserStorageController & SDK ([#4938](https://github.com/MetaMask/core/pull/4938)) + +### Changed + +- use better type system for user storage ([#4907](https://github.com/MetaMask/core/pull/4907)) + +### Fixed + +- account sync infinite account creation bug ([#4933](https://github.com/MetaMask/core/pull/4933)) + ## [1.0.0] ### Changed @@ -295,7 +328,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Initial release -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@1.0.0...HEAD +[Unreleased]: 
https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@2.0.0...HEAD +[2.0.0]: https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@1.0.2...@metamask/profile-sync-controller@2.0.0 +[1.0.2]: https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@1.0.1...@metamask/profile-sync-controller@1.0.2 +[1.0.1]: https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@1.0.0...@metamask/profile-sync-controller@1.0.1 [1.0.0]: https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@0.9.8...@metamask/profile-sync-controller@1.0.0 [0.9.8]: https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@0.9.7...@metamask/profile-sync-controller@0.9.8 [0.9.7]: https://github.com/MetaMask/core/compare/@metamask/profile-sync-controller@0.9.6...@metamask/profile-sync-controller@0.9.7 diff --git a/packages/profile-sync-controller/package.json b/packages/profile-sync-controller/package.json index dceafce99a..23c2e7b73f 100644 --- a/packages/profile-sync-controller/package.json +++ b/packages/profile-sync-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/profile-sync-controller", - "version": "1.0.0", + "version": "2.0.0", "description": "The profile sync helps developers synchronize data across multiple clients and devices in a privacy-preserving way. All data saved in the user storage database is encrypted client-side to preserve privacy. The user storage provides a modular design, giving developers the flexibility to construct and manage their storage spaces in a way that best suits their needs", "keywords": [ "MetaMask", @@ -101,11 +101,11 @@ }, "dependencies": { "@metamask/base-controller": "^7.0.2", - "@metamask/keyring-api": "^8.1.3", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-api": "^10.1.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/network-controller": "^22.0.2", - "@metamask/snaps-sdk": "^6.5.0", - "@metamask/snaps-utils": "^8.1.1", + "@metamask/snaps-sdk": "^6.7.0", + "@metamask/snaps-utils": "^8.3.0", "@noble/ciphers": "^0.5.2", "@noble/hashes": "^1.4.0", "immer": "^9.0.6", @@ -114,9 +114,9 @@ }, "devDependencies": { "@lavamoat/allow-scripts": "^3.0.4", - "@metamask/accounts-controller": "^19.0.0", + "@metamask/accounts-controller": "^20.0.0", "@metamask/auto-changelog": "^3.4.4", - "@metamask/snaps-controllers": "^9.7.0", + "@metamask/snaps-controllers": "^9.10.0", "@types/jest": "^27.4.1", "deepmerge": "^4.2.2", "ethers": "^6.12.0", @@ -129,10 +129,10 @@ "typescript": "~5.2.2" }, "peerDependencies": { - "@metamask/accounts-controller": "^19.0.0", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/accounts-controller": "^20.0.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/network-controller": "^22.0.0", - "@metamask/snaps-controllers": "^9.7.0" + "@metamask/snaps-controllers": "^9.10.0" }, "engines": { "node": "^18.18 || >=20" diff --git a/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.test.ts b/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.test.ts index 1a753acd99..70d65648a6 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.test.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.test.ts @@ -3,6 +3,7 @@ import type { InternalAccount } from '@metamask/keyring-api'; import type nock from 'nock'; import encryption, { createSHA256Hash } from '../../shared/encryption'; 
+import { USER_STORAGE_FEATURE_NAMES } from '../../shared/storage-schema'; import type { AuthenticationControllerGetBearerToken, AuthenticationControllerGetSessionProfile, @@ -20,6 +21,7 @@ import { mockEndpointUpsertUserStorage, mockEndpointDeleteUserStorageAllFeatureEntries, mockEndpointDeleteUserStorage, + mockEndpointBatchDeleteUserStorage, } from './__fixtures__/mockServices'; import { MOCK_STORAGE_DATA, @@ -76,7 +78,7 @@ describe('user-storage/user-storage-controller - performGetStorage() tests', () }); const result = await controller.performGetStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ); mockAPI.done(); expect(result).toBe(MOCK_STORAGE_DATA); @@ -90,11 +92,15 @@ describe('user-storage/user-storage-controller - performGetStorage() tests', () state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); await expect( - controller.performGetStorage('notifications.notification_settings'), + controller.performGetStorage( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(expect.any(Error)); }); @@ -129,7 +135,9 @@ describe('user-storage/user-storage-controller - performGetStorage() tests', () }); await expect( - controller.performGetStorage('notifications.notification_settings'), + controller.performGetStorage( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(expect.any(Error)); }, ); @@ -165,11 +173,15 @@ describe('user-storage/user-storage-controller - performGetStorageAllFeatureEntr state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); await expect( - controller.performGetStorageAllFeatureEntries('notifications'), + controller.performGetStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + ), ).rejects.toThrow(expect.any(Error)); }); @@ -204,7 +216,9 @@ describe('user-storage/user-storage-controller - performGetStorageAllFeatureEntr }); await expect( - controller.performGetStorageAllFeatureEntries('notifications'), + controller.performGetStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + ), ).rejects.toThrow(expect.any(Error)); }, ); @@ -226,7 +240,7 @@ describe('user-storage/user-storage-controller - performSetStorage() tests', () }); await controller.performSetStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, 'new data', ); expect(mockAPI.isDone()).toBe(true); @@ -240,12 +254,14 @@ describe('user-storage/user-storage-controller - performSetStorage() tests', () state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); await expect( controller.performSetStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, 'new data', ), ).rejects.toThrow(expect.any(Error)); @@ -283,7 +299,7 @@ describe('user-storage/user-storage-controller - performSetStorage() tests', () await expect( controller.performSetStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, 'new data', ), ).rejects.toThrow(expect.any(Error)); @@ -293,7 +309,7 @@ 
describe('user-storage/user-storage-controller - performSetStorage() tests', () it('rejects if api call fails', async () => { const { messengerMocks } = arrangeMocks({ mockAPI: mockEndpointUpsertUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 500 }, ), }); @@ -303,7 +319,7 @@ describe('user-storage/user-storage-controller - performSetStorage() tests', () }); await expect( controller.performSetStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, 'new data', ), ).rejects.toThrow(expect.any(Error)); @@ -315,7 +331,7 @@ describe('user-storage/user-storage-controller - performBatchSetStorage() tests' return { messengerMocks: mockUserStorageMessenger(), mockAPI: mockEndpointBatchUpsertUserStorage( - 'notifications', + USER_STORAGE_FEATURE_NAMES.notifications, mockResponseStatus ? { status: mockResponseStatus } : undefined, ), }; @@ -328,8 +344,111 @@ describe('user-storage/user-storage-controller - performBatchSetStorage() tests' getMetaMetricsState: () => true, }); - await controller.performBatchSetStorage('notifications', [ - ['notifications.notification_settings', 'new data'], + await controller.performBatchSetStorage( + USER_STORAGE_FEATURE_NAMES.notifications, + [['notification_settings', 'new data']], + ); + expect(mockAPI.isDone()).toBe(true); + }); + + it('rejects if UserStorage is not enabled', async () => { + const { messengerMocks } = arrangeMocks(); + const controller = new UserStorageController({ + messenger: messengerMocks.messenger, + getMetaMetricsState: () => true, + state: { + isProfileSyncingEnabled: false, + isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, + }, + }); + + await expect( + controller.performBatchSetStorage( + USER_STORAGE_FEATURE_NAMES.notifications, + [['notification_settings', 'new data']], + ), + ).rejects.toThrow(expect.any(Error)); + }); + + it.each([ + [ + 'fails when no bearer token is found (auth errors)', + (messengerMocks: ReturnType) => + messengerMocks.mockAuthGetBearerToken.mockRejectedValue( + new Error('MOCK FAILURE'), + ), + ], + [ + 'fails when no session identifier is found (auth errors)', + (messengerMocks: ReturnType) => + messengerMocks.mockAuthGetSessionProfile.mockRejectedValue( + new Error('MOCK FAILURE'), + ), + ], + ])( + 'rejects on auth failure - %s', + async ( + _: string, + arrangeFailureCase: ( + messengerMocks: ReturnType, + ) => void, + ) => { + const { messengerMocks } = arrangeMocks(); + arrangeFailureCase(messengerMocks); + const controller = new UserStorageController({ + messenger: messengerMocks.messenger, + getMetaMetricsState: () => true, + }); + + await expect( + controller.performBatchSetStorage( + USER_STORAGE_FEATURE_NAMES.notifications, + [['notification_settings', 'new data']], + ), + ).rejects.toThrow(expect.any(Error)); + }, + ); + + it('rejects if api call fails', async () => { + const { messengerMocks, mockAPI } = arrangeMocks(500); + const controller = new UserStorageController({ + messenger: messengerMocks.messenger, + getMetaMetricsState: () => true, + }); + + await expect( + controller.performBatchSetStorage( + USER_STORAGE_FEATURE_NAMES.notifications, + [['notification_settings', 'new data']], + ), + ).rejects.toThrow(expect.any(Error)); + mockAPI.done(); + }); +}); + +describe('user-storage/user-storage-controller - performBatchDeleteStorage() tests', () => { + const 
arrangeMocks = (mockResponseStatus?: number) => { + return { + messengerMocks: mockUserStorageMessenger(), + mockAPI: mockEndpointBatchDeleteUserStorage( + 'notifications', + mockResponseStatus ? { status: mockResponseStatus } : undefined, + ), + }; + }; + + it('batch deletes entries in user storage', async () => { + const { messengerMocks, mockAPI } = arrangeMocks(); + const controller = new UserStorageController({ + messenger: messengerMocks.messenger, + getMetaMetricsState: () => true, + }); + + await controller.performBatchDeleteStorage('notifications', [ + 'notification_settings', + 'notification_settings', ]); expect(mockAPI.isDone()).toBe(true); }); @@ -342,12 +461,15 @@ describe('user-storage/user-storage-controller - performBatchSetStorage() tests' state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); await expect( - controller.performBatchSetStorage('notifications', [ - ['notifications.notification_settings', 'new data'], + controller.performBatchDeleteStorage('notifications', [ + 'notification_settings', + 'notification_settings', ]), ).rejects.toThrow(expect.any(Error)); }); @@ -383,8 +505,9 @@ describe('user-storage/user-storage-controller - performBatchSetStorage() tests' }); await expect( - controller.performBatchSetStorage('notifications', [ - ['notifications.notification_settings', 'new data'], + controller.performBatchDeleteStorage('notifications', [ + 'notification_settings', + 'notification_settings', ]), ).rejects.toThrow(expect.any(Error)); }, @@ -398,8 +521,9 @@ describe('user-storage/user-storage-controller - performBatchSetStorage() tests' }); await expect( - controller.performBatchSetStorage('notifications', [ - ['notifications.notification_settings', 'new data'], + controller.performBatchDeleteStorage('notifications', [ + 'notification_settings', + 'notification_settings', ]), ).rejects.toThrow(expect.any(Error)); mockAPI.done(); @@ -411,7 +535,7 @@ describe('user-storage/user-storage-controller - performDeleteStorage() tests', return { messengerMocks: mockUserStorageMessenger(), mockAPI: mockEndpointDeleteUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, mockResponseStatus ? 
{ status: mockResponseStatus } : undefined, ), }; @@ -425,7 +549,7 @@ describe('user-storage/user-storage-controller - performDeleteStorage() tests', }); await controller.performDeleteStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ); mockAPI.done(); @@ -440,11 +564,15 @@ describe('user-storage/user-storage-controller - performDeleteStorage() tests', state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); await expect( - controller.performDeleteStorage('notifications.notification_settings'), + controller.performDeleteStorage( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(expect.any(Error)); }); @@ -479,7 +607,9 @@ describe('user-storage/user-storage-controller - performDeleteStorage() tests', }); await expect( - controller.performDeleteStorage('notifications.notification_settings'), + controller.performDeleteStorage( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(expect.any(Error)); }, ); @@ -492,7 +622,9 @@ describe('user-storage/user-storage-controller - performDeleteStorage() tests', }); await expect( - controller.performDeleteStorage('notifications.notification_settings'), + controller.performDeleteStorage( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(expect.any(Error)); mockAPI.done(); }); @@ -503,7 +635,7 @@ describe('user-storage/user-storage-controller - performDeleteStorageAllFeatureE return { messengerMocks: mockUserStorageMessenger(), mockAPI: mockEndpointDeleteUserStorageAllFeatureEntries( - 'notifications', + USER_STORAGE_FEATURE_NAMES.notifications, mockResponseStatus ? 
{ status: mockResponseStatus } : undefined, ), }; @@ -516,7 +648,9 @@ describe('user-storage/user-storage-controller - performDeleteStorageAllFeatureE getMetaMetricsState: () => true, }); - await controller.performDeleteStorageAllFeatureEntries('notifications'); + await controller.performDeleteStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + ); mockAPI.done(); expect(mockAPI.isDone()).toBe(true); @@ -530,11 +664,15 @@ describe('user-storage/user-storage-controller - performDeleteStorageAllFeatureE state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); await expect( - controller.performDeleteStorageAllFeatureEntries('notifications'), + controller.performDeleteStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + ), ).rejects.toThrow(expect.any(Error)); }); @@ -569,7 +707,9 @@ describe('user-storage/user-storage-controller - performDeleteStorageAllFeatureE }); await expect( - controller.performDeleteStorageAllFeatureEntries('notifications'), + controller.performDeleteStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + ), ).rejects.toThrow(expect.any(Error)); }, ); @@ -582,7 +722,9 @@ describe('user-storage/user-storage-controller - performDeleteStorageAllFeatureE }); await expect( - controller.performDeleteStorageAllFeatureEntries('notifications'), + controller.performDeleteStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + ), ).rejects.toThrow(expect.any(Error)); mockAPI.done(); }); @@ -614,6 +756,8 @@ describe('user-storage/user-storage-controller - getStorageKey() tests', () => { state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); @@ -658,6 +802,8 @@ describe('user-storage/user-storage-controller - enableProfileSyncing() tests', state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); @@ -688,6 +834,8 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); @@ -702,7 +850,9 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto messengerMocks: mockUserStorageMessenger(), mockAPI: { mockEndpointGetUserStorage: - await mockEndpointGetUserStorageAllFeatureEntries('accounts'), + await mockEndpointGetUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + ), }, }; }; @@ -740,7 +890,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), }, @@ -782,12 +932,12 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, mockUserStorageAccountsResponse, ), mockEndpointBatchUpsertUserStorage: mockEndpointBatchUpsertUserStorage( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, undefined, async (_uri, requestBody) => { const 
decryptedBody = await decryptBatchUpsertBody( @@ -826,7 +976,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto expect(mockAPI.mockEndpointBatchUpsertUserStorage.isDone()).toBe(true); }); - it('creates internal accounts if user storage has more accounts', async () => { + it('creates internal accounts if user storage has more accounts. it also updates hasAccountSyncingSyncedAtLeastOnce accordingly', async () => { const mockUserStorageAccountsResponse = async () => { return { status: 200, @@ -846,9 +996,32 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), + mockEndpointBatchDeleteUserStorage: + mockEndpointBatchDeleteUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + undefined, + async (_uri, requestBody) => { + if (typeof requestBody === 'string') { + return; + } + + const expectedBody = createExpectedAccountSyncBatchDeleteBody( + MOCK_USER_STORAGE_ACCOUNTS.SAME_AS_INTERNAL_ALL.filter( + (account) => + !MOCK_INTERNAL_ACCOUNTS.ONE.find( + (internalAccount) => + internalAccount.address === account.a, + ), + ).map((account) => account.a), + MOCK_STORAGE_KEY, + ); + + expect(requestBody.batch_delete).toStrictEqual(expectedBody); + }, + ), }, }; }; @@ -872,6 +1045,102 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto MOCK_USER_STORAGE_ACCOUNTS.SAME_AS_INTERNAL_ALL.length - MOCK_INTERNAL_ACCOUNTS.ONE.length, ); + + expect(mockAPI.mockEndpointBatchDeleteUserStorage.isDone()).toBe(true); + + expect(controller.state.hasAccountSyncingSyncedAtLeastOnce).toBe(true); + }); + + describe('handles corrupted user storage gracefully', () => { + const mockUserStorageAccountsResponse = async () => { + return { + status: 200, + body: await createMockUserStorageEntries( + MOCK_USER_STORAGE_ACCOUNTS.TWO_DEFAULT_NAMES_WITH_ONE_BOGUS, + ), + }; + }; + + const arrangeMocksForBogusAccounts = async () => { + return { + messengerMocks: mockUserStorageMessenger({ + accounts: { + accountsList: + MOCK_INTERNAL_ACCOUNTS.ONE_DEFAULT_NAME as InternalAccount[], + }, + }), + mockAPI: { + mockEndpointGetUserStorage: + await mockEndpointGetUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + await mockUserStorageAccountsResponse(), + ), + mockEndpointBatchDeleteUserStorage: + mockEndpointBatchDeleteUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + undefined, + async (_uri, requestBody) => { + if (typeof requestBody === 'string') { + return; + } + + const expectedBody = createExpectedAccountSyncBatchDeleteBody( + [ + MOCK_USER_STORAGE_ACCOUNTS + .TWO_DEFAULT_NAMES_WITH_ONE_BOGUS[1].a, + ], + MOCK_STORAGE_KEY, + ); + + expect(requestBody.batch_delete).toStrictEqual(expectedBody); + }, + ), + mockEndpointBatchUpsertUserStorage: + mockEndpointBatchUpsertUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + ), + }, + }; + }; + + it('does not save the bogus account to user storage, and deletes it from user storage', async () => { + const { messengerMocks, mockAPI } = await arrangeMocksForBogusAccounts(); + const controller = new UserStorageController({ + messenger: messengerMocks.messenger, + env: { + isAccountSyncingEnabled: true, + }, + getMetaMetricsState: () => true, + }); + + await controller.syncInternalAccountsWithUserStorage(); + + expect(mockAPI.mockEndpointGetUserStorage.isDone()).toBe(true); + 
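
The storage surface exercised by these tests now looks roughly like the sketch below. Controller construction (messenger, `getMetaMetricsState`) is elided and assumed to match the test setup, path segments come from `USER_STORAGE_FEATURE_NAMES`, and the payload strings are placeholders as in the tests.

```ts
import { USER_STORAGE_FEATURE_NAMES } from '../../shared/storage-schema';
// Assumed to be the same default export the test file imports.
import UserStorageController from './UserStorageController';

async function sketch(controller: UserStorageController) {
  // Single-entry paths are `${feature}.${key}`.
  await controller.performSetStorage(
    `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`,
    'new data',
  );

  // Batch calls take the feature name plus per-entry keys (no feature prefix).
  await controller.performBatchSetStorage(
    USER_STORAGE_FEATURE_NAMES.notifications,
    [['notification_settings', 'new data']],
  );
  await controller.performBatchDeleteStorage(
    USER_STORAGE_FEATURE_NAMES.notifications,
    ['notification_settings'],
  );
}
```
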
expect(mockAPI.mockEndpointBatchUpsertUserStorage.isDone()).toBe(false); + expect(mockAPI.mockEndpointBatchDeleteUserStorage.isDone()).toBe(true); + }); + + it('fires the onAccountSyncErroneousSituation callback in erroneous situations', async () => { + const onAccountSyncErroneousSituation = jest.fn(); + + const { messengerMocks } = await arrangeMocksForBogusAccounts(); + const controller = new UserStorageController({ + messenger: messengerMocks.messenger, + config: { + accountSyncing: { + onAccountSyncErroneousSituation, + }, + }, + env: { + isAccountSyncingEnabled: true, + }, + getMetaMetricsState: () => true, + }); + + await controller.syncInternalAccountsWithUserStorage(); + + expect(onAccountSyncErroneousSituation).toHaveBeenCalledTimes(1); + }); }); it('fires the onAccountAdded callback when adding an account', async () => { @@ -894,9 +1163,32 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), + mockEndpointBatchDeleteUserStorage: + mockEndpointBatchDeleteUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + undefined, + async (_uri, requestBody) => { + if (typeof requestBody === 'string') { + return; + } + + const expectedBody = createExpectedAccountSyncBatchDeleteBody( + MOCK_USER_STORAGE_ACCOUNTS.SAME_AS_INTERNAL_ALL.filter( + (account) => + !MOCK_INTERNAL_ACCOUNTS.ONE.find( + (internalAccount) => + internalAccount.address === account.a, + ), + ).map((account) => account.a), + MOCK_STORAGE_KEY, + ); + + expect(requestBody.batch_delete).toStrictEqual(expectedBody); + }, + ), }, }; }; @@ -925,6 +1217,8 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto MOCK_USER_STORAGE_ACCOUNTS.SAME_AS_INTERNAL_ALL.length - MOCK_INTERNAL_ACCOUNTS.ONE.length, ); + + expect(mockAPI.mockEndpointBatchDeleteUserStorage.isDone()).toBe(true); }); it('does not create internal accounts if user storage has less accounts', async () => { @@ -950,11 +1244,13 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), mockEndpointBatchUpsertUserStorage: - mockEndpointBatchUpsertUserStorage('accounts'), + mockEndpointBatchUpsertUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + ), }, }; }; @@ -1001,7 +1297,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), }, @@ -1038,11 +1334,13 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), mockEndpointBatchUpsertUserStorage: - mockEndpointBatchUpsertUserStorage('accounts'), + mockEndpointBatchUpsertUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + ), }, }; }; @@ -1078,11 +1376,13 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + 
USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), mockEndpointBatchUpsertUserStorage: - mockEndpointBatchUpsertUserStorage('accounts'), + mockEndpointBatchUpsertUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + ), }, }; }; @@ -1129,7 +1429,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), }, @@ -1172,7 +1472,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), }, @@ -1209,11 +1509,13 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), mockEndpointBatchUpsertUserStorage: - mockEndpointBatchUpsertUserStorage('accounts'), + mockEndpointBatchUpsertUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + ), }, }; }; @@ -1249,7 +1551,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), }, @@ -1302,7 +1604,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), }, @@ -1345,7 +1647,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), }, @@ -1391,7 +1693,7 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, mockGetEntriesResponse, ), }, @@ -1436,11 +1738,13 @@ describe('user-storage/user-storage-controller - syncInternalAccountsWithUserSto mockAPI: { mockEndpointGetUserStorage: await mockEndpointGetUserStorageAllFeatureEntries( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, await mockUserStorageAccountsResponse(), ), mockEndpointBatchUpsertUserStorage: - mockEndpointBatchUpsertUserStorage('accounts'), + mockEndpointBatchUpsertUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + ), }, }; }; @@ -1489,6 +1793,8 @@ describe('user-storage/user-storage-controller - saveInternalAccountToUserStorag state: { isProfileSyncingEnabled: false, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }, }); @@ -1504,7 +1810,7 @@ describe('user-storage/user-storage-controller - saveInternalAccountToUserStorag return { messengerMocks: mockUserStorageMessenger(), mockAPI: mockEndpointUpsertUserStorage( - `accounts.${MOCK_INTERNAL_ACCOUNTS.ONE[0].address}`, + `${USER_STORAGE_FEATURE_NAMES.accounts}.${MOCK_INTERNAL_ACCOUNTS.ONE[0].address}`, 
), }; }; @@ -1530,7 +1836,7 @@ describe('user-storage/user-storage-controller - saveInternalAccountToUserStorag return { messengerMocks: mockUserStorageMessenger(), mockAPI: mockEndpointUpsertUserStorage( - `accounts.${MOCK_INTERNAL_ACCOUNTS.ONE[0].address}`, + `${USER_STORAGE_FEATURE_NAMES.accounts}.${MOCK_INTERNAL_ACCOUNTS.ONE[0].address}`, ), }; }; @@ -1556,7 +1862,7 @@ describe('user-storage/user-storage-controller - saveInternalAccountToUserStorag return { messengerMocks: mockUserStorageMessenger(), mockAPI: mockEndpointUpsertUserStorage( - `accounts.${MOCK_INTERNAL_ACCOUNTS.ONE[0].address}`, + `${USER_STORAGE_FEATURE_NAMES.accounts}.${MOCK_INTERNAL_ACCOUNTS.ONE[0].address}`, { status: 500 }, ), }; @@ -1578,54 +1884,97 @@ describe('user-storage/user-storage-controller - saveInternalAccountToUserStorag ).rejects.toThrow(expect.any(Error)); }); - it('saves an internal account to user storage when the AccountsController:accountRenamed event is fired', async () => { - const { baseMessenger, messenger } = mockUserStorageMessenger(); + describe('it reacts to other controller events', () => { + const mockUserStorageAccountsResponse = async () => { + return { + status: 200, + body: await createMockUserStorageEntries( + MOCK_USER_STORAGE_ACCOUNTS.SAME_AS_INTERNAL_ALL, + ), + }; + }; - const controller = new UserStorageController({ - messenger, - env: { - isAccountSyncingEnabled: true, - }, - getMetaMetricsState: () => true, - }); + const arrangeMocksForAccounts = async () => { + return { + messengerMocks: mockUserStorageMessenger({ + accounts: { + accountsList: + MOCK_INTERNAL_ACCOUNTS.ONE_CUSTOM_NAME_WITH_LAST_UPDATED_MOST_RECENT as InternalAccount[], + }, + }), + mockAPI: { + mockEndpointGetUserStorage: + await mockEndpointGetUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + await mockUserStorageAccountsResponse(), + ), + mockEndpointBatchUpsertUserStorage: + mockEndpointBatchUpsertUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + ), + }, + }; + }; - const mockSaveInternalAccountToUserStorage = jest - .spyOn(controller, 'saveInternalAccountToUserStorage') - .mockImplementation(); + it('saves an internal account to user storage when the AccountsController:accountRenamed event is fired', async () => { + await arrangeMocksForAccounts(); - baseMessenger.publish( - 'AccountsController:accountRenamed', - MOCK_INTERNAL_ACCOUNTS.ONE[0] as InternalAccount, - ); + const { baseMessenger, messenger } = mockUserStorageMessenger(); - expect(mockSaveInternalAccountToUserStorage).toHaveBeenCalledWith( - MOCK_INTERNAL_ACCOUNTS.ONE[0], - ); - }); + const controller = new UserStorageController({ + messenger, + env: { + isAccountSyncingEnabled: true, + }, + getMetaMetricsState: () => true, + }); - it('saves an internal account to user storage when the AccountsController:accountAdded event is fired', async () => { - const { baseMessenger, messenger } = mockUserStorageMessenger(); + // We need to sync at least once before we listen for other controller events + await controller.syncInternalAccountsWithUserStorage(); - const controller = new UserStorageController({ - messenger, - env: { - isAccountSyncingEnabled: true, - }, - getMetaMetricsState: () => true, + const mockSaveInternalAccountToUserStorage = jest + .spyOn(controller, 'saveInternalAccountToUserStorage') + .mockImplementation(); + + baseMessenger.publish( + 'AccountsController:accountRenamed', + MOCK_INTERNAL_ACCOUNTS.ONE[0] as InternalAccount, + ); + + expect(mockSaveInternalAccountToUserStorage).toHaveBeenCalledWith( + 
MOCK_INTERNAL_ACCOUNTS.ONE[0], + ); }); - const mockSaveInternalAccountToUserStorage = jest - .spyOn(controller, 'saveInternalAccountToUserStorage') - .mockImplementation(); + it('saves an internal account to user storage when the AccountsController:accountAdded event is fired', async () => { + await arrangeMocksForAccounts(); - baseMessenger.publish( - 'AccountsController:accountAdded', - MOCK_INTERNAL_ACCOUNTS.ONE[0] as InternalAccount, - ); + const { baseMessenger, messenger } = mockUserStorageMessenger(); - expect(mockSaveInternalAccountToUserStorage).toHaveBeenCalledWith( - MOCK_INTERNAL_ACCOUNTS.ONE[0], - ); + const controller = new UserStorageController({ + messenger, + env: { + isAccountSyncingEnabled: true, + }, + getMetaMetricsState: () => true, + }); + + // We need to sync at least once before we listen for other controller events + await controller.syncInternalAccountsWithUserStorage(); + + const mockSaveInternalAccountToUserStorage = jest + .spyOn(controller, 'saveInternalAccountToUserStorage') + .mockImplementation(); + + baseMessenger.publish( + 'AccountsController:accountAdded', + MOCK_INTERNAL_ACCOUNTS.ONE[0] as InternalAccount, + ); + + expect(mockSaveInternalAccountToUserStorage).toHaveBeenCalledWith( + MOCK_INTERNAL_ACCOUNTS.ONE[0], + ); + }); }); }); @@ -1901,3 +2250,18 @@ function createExpectedAccountSyncBatchUpsertBody( ), ]); } + +/** + * Test Utility - creates a realistic expected batch delete payload + * @param data - data supposed to be deleted + * @param storageKey - storage key + * @returns expected body + */ +function createExpectedAccountSyncBatchDeleteBody( + data: string[], + storageKey: string, +) { + return data.map((entryKey) => + createSHA256Hash(String(entryKey) + storageKey), + ); +} diff --git a/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.ts b/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.ts index b35f400ca9..845ce62296 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/UserStorageController.ts @@ -23,10 +23,11 @@ import type { NetworkConfiguration } from '@metamask/network-controller'; import type { HandleSnapRequest } from '@metamask/snaps-controllers'; import { createSHA256Hash } from '../../shared/encryption'; -import type { - UserStoragePathWithFeatureAndKey, - UserStoragePathWithFeatureOnly, - UserStoragePathWithKeyOnly, +import type { UserStorageFeatureKeys } from '../../shared/storage-schema'; +import { + USER_STORAGE_FEATURE_NAMES, + type UserStoragePathWithFeatureAndKey, + type UserStoragePathWithFeatureOnly, } from '../../shared/storage-schema'; import type { NativeScrypt } from '../../shared/types/encryption'; import { createSnapSignMessageRequest } from '../authentication/auth-snap-requests'; @@ -44,6 +45,7 @@ import { } from './accounts/user-storage'; import { startNetworkSyncing } from './network-syncing/controller-integration'; import { + batchDeleteUserStorage, batchUpsertUserStorage, deleteUserStorage, deleteUserStorageAllFeatureEntries, @@ -98,11 +100,21 @@ export type UserStorageControllerState = { * Loading state for the profile syncing update */ isProfileSyncingUpdateLoading: boolean; + /** + * Condition used by E2E tests to determine if account syncing has been dispatched at least once. + */ + hasAccountSyncingSyncedAtLeastOnce: boolean; + /** + * Condition used by UI to determine if account syncing is ready to be dispatched. 
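   *
   * Illustrative note only: a client might flip this via `setIsAccountSyncingReadyToBeDispatched(true)`
   * and only then dispatch `syncInternalAccountsWithUserStorage()`;
   * `hasAccountSyncingSyncedAtLeastOnce` is in turn set by the first completed sync.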
+ */ + isAccountSyncingReadyToBeDispatched: boolean; }; export const defaultState: UserStorageControllerState = { isProfileSyncingEnabled: true, isProfileSyncingUpdateLoading: false, + hasAccountSyncingSyncedAtLeastOnce: false, + isAccountSyncingReadyToBeDispatched: false, }; const metadata: StateMetadata = { @@ -114,6 +126,14 @@ const metadata: StateMetadata = { persist: false, anonymous: false, }, + hasAccountSyncingSyncedAtLeastOnce: { + persist: true, + anonymous: true, + }, + isAccountSyncingReadyToBeDispatched: { + persist: false, + anonymous: false, + }, }; type ControllerConfig = { @@ -130,6 +150,15 @@ type ControllerConfig = { * This is used for analytics. */ onAccountNameUpdated?: (profileId: string) => void; + + /** + * Callback that fires when an erroneous situation happens during account sync. + * This is used for analytics. + */ + onAccountSyncErroneousSituation?: ( + profileId: string, + situationMessage: string, + ) => void; }; }; @@ -304,7 +333,10 @@ export default class UserStorageController extends BaseController< 'AccountsController:accountAdded', // eslint-disable-next-line @typescript-eslint/no-misused-promises async (account) => { - if (!this.#accounts.canSync()) { + if ( + !this.#accounts.canSync() || + !this.state.hasAccountSyncingSyncedAtLeastOnce + ) { return; } @@ -316,7 +348,10 @@ export default class UserStorageController extends BaseController< 'AccountsController:accountRenamed', // eslint-disable-next-line @typescript-eslint/no-misused-promises async (account) => { - if (!this.#accounts.canSync()) { + if ( + !this.#accounts.canSync() || + !this.state.hasAccountSyncingSyncedAtLeastOnce + ) { return; } await this.saveInternalAccountToUserStorage(account); @@ -340,7 +375,9 @@ export default class UserStorageController extends BaseController< UserStorageAccount[] | null > => { const rawAccountsListResponse = - await this.performGetStorageAllFeatureEntries('accounts'); + await this.performGetStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + ); return ( rawAccountsListResponse?.map((rawAccount) => JSON.parse(rawAccount)) ?? @@ -355,7 +392,7 @@ export default class UserStorageController extends BaseController< mapInternalAccountToUserStorageAccount(internalAccount); await this.performSetStorage( - `accounts.${internalAccount.address}`, + `${USER_STORAGE_FEATURE_NAMES.accounts}.${internalAccount.address}`, JSON.stringify(mappedAccount), ); }, @@ -371,7 +408,7 @@ export default class UserStorageController extends BaseController< internalAccountsList.map(mapInternalAccountToUserStorageAccount); await this.performBatchSetStorage( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, internalAccountsListFormattedForUserStorage.map((account) => [ account.a, JSON.stringify(account), @@ -671,9 +708,11 @@ export default class UserStorageController extends BaseController< * @param values - data to store, in the form of an array of `[entryKey, entryValue]` pairs * @returns nothing. NOTE that an error is thrown if fails to store data. */ - public async performBatchSetStorage( - path: UserStoragePathWithFeatureOnly, - values: [UserStoragePathWithKeyOnly, string][], + public async performBatchSetStorage< + FeatureName extends UserStoragePathWithFeatureOnly, + >( + path: FeatureName, + values: [UserStorageFeatureKeys, string][], ): Promise { this.#assertProfileSyncingEnabled(); @@ -731,6 +770,33 @@ export default class UserStorageController extends BaseController< }); } + /** + * Allows delete of multiple user data entries for one specific feature. 
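   *
   * For illustration: with the `accounts_v2` feature defined in this package's schema constants,
   * selected entries could be removed via
   * `await controller.performBatchDeleteStorage(USER_STORAGE_FEATURE_NAMES.accounts, ['0x123', '0x456'])`.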
Data deleted must be string formatted. + * Developers can extend the entry path through the `schema.ts` file. + * + * @param path - string in the form of `${feature}` that matches schema + * @param values - data to store, in the form of an array of entryKey[] + * @returns nothing. NOTE that an error is thrown if fails to store data. + */ + public async performBatchDeleteStorage< + FeatureName extends UserStoragePathWithFeatureOnly, + >( + path: FeatureName, + values: UserStorageFeatureKeys[], + ): Promise { + this.#assertProfileSyncingEnabled(); + + const { bearerToken, storageKey } = + await this.#getStorageKeyAndBearerToken(); + + await batchDeleteUserStorage(values, { + path, + bearerToken, + storageKey, + nativeScryptCrypto: this.#nativeScryptCrypto, + }); + } + /** * Retrieves the storage key, for internal use only! * @@ -819,6 +885,24 @@ export default class UserStorageController extends BaseController< }); } + private async setHasAccountSyncingSyncedAtLeastOnce( + hasAccountSyncingSyncedAtLeastOnce: boolean, + ): Promise { + this.update((state) => { + state.hasAccountSyncingSyncedAtLeastOnce = + hasAccountSyncingSyncedAtLeastOnce; + }); + } + + public async setIsAccountSyncingReadyToBeDispatched( + isAccountSyncingReadyToBeDispatched: boolean, + ): Promise { + this.update((state) => { + state.isAccountSyncingReadyToBeDispatched = + isAccountSyncingReadyToBeDispatched; + }); + } + /** * Syncs the internal accounts list with the user storage accounts list. * This method is used to make sure that the internal accounts list is up-to-date with the user storage accounts list and vice-versa. @@ -839,6 +923,7 @@ export default class UserStorageController extends BaseController< if (!userStorageAccountsList || !userStorageAccountsList.length) { await this.#accounts.saveInternalAccountsListToUserStorage(); + await this.setHasAccountSyncingSyncedAtLeastOnce(true); return; } @@ -847,18 +932,19 @@ export default class UserStorageController extends BaseController< // Compare internal accounts list with user storage accounts list // First step: compare lengths - let internalAccountsList = await this.#accounts.getInternalAccountsList(); + const internalAccountsList = + await this.#accounts.getInternalAccountsList(); if (!internalAccountsList || !internalAccountsList.length) { throw new Error(`Failed to get internal accounts list`); } - const hasMoreInternalAccountsThanUserStorageAccounts = - internalAccountsList.length > userStorageAccountsList.length; + const hasMoreUserStorageAccountsThanInternalAccounts = + userStorageAccountsList.length > internalAccountsList.length; // We don't want to remove existing accounts for a user - // so we only add new accounts if the user has more accounts than the internal accounts list - if (!hasMoreInternalAccountsThanUserStorageAccounts) { + // so we only add new accounts if the user has more accounts in user storage than internal accounts + if (hasMoreUserStorageAccountsThanInternalAccounts) { const numberOfAccountsToAdd = Math.min( userStorageAccountsList.length, @@ -866,28 +952,45 @@ export default class UserStorageController extends BaseController< ) - internalAccountsList.length; // Create new accounts to match the user storage accounts list - for (let i = 0; i < numberOfAccountsToAdd; i++) { await this.messagingSystem.call('KeyringController:addNewAccount'); - this.#config?.accountSyncing?.onAccountAdded?.(profileId); } } // Second step: compare account names // Get the internal accounts list again since new accounts might have been added in the previous 
step - internalAccountsList = await this.#accounts.getInternalAccountsList(); + const refreshedInternalAccountsList = + await this.#accounts.getInternalAccountsList(); + + const newlyAddedAccounts = refreshedInternalAccountsList.filter( + (account) => + !internalAccountsList.find((a) => a.address === account.address), + ); - for (const internalAccount of internalAccountsList) { + for (const internalAccount of refreshedInternalAccountsList) { const userStorageAccount = userStorageAccountsList.find( (account) => account.a === internalAccount.address, ); + // If the account is not present in user storage if (!userStorageAccount) { + // If the account was just added in the previous step, skip saving it, it's likely to be a bogus account + if (newlyAddedAccounts.includes(internalAccount)) { + this.#config?.accountSyncing?.onAccountSyncErroneousSituation?.( + profileId, + 'An account was added to the internal accounts list but was not present in the user storage accounts list', + ); + continue; + } + // Otherwise, it means that this internal account was present before the sync, and needs to be saved to the user storage internalAccountsToBeSavedToUserStorage.push(internalAccount); continue; } + // From this point on, we know that the account is present in + // both the internal accounts list and the user storage accounts list + // One or both accounts have default names const isInternalAccountNameDefault = isNameDefaultAccountName( internalAccount.metadata.name, @@ -958,13 +1061,37 @@ export default class UserStorageController extends BaseController< } // Save the internal accounts list to the user storage - await this.performBatchSetStorage( - 'accounts', - internalAccountsToBeSavedToUserStorage.map((account) => [ - account.address, - JSON.stringify(mapInternalAccountToUserStorageAccount(account)), - ]), + if (internalAccountsToBeSavedToUserStorage.length) { + await this.performBatchSetStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + internalAccountsToBeSavedToUserStorage.map((account) => [ + account.address, + JSON.stringify(mapInternalAccountToUserStorageAccount(account)), + ]), + ); + } + + // In case we have corrupted user storage with accounts that don't exist in the internal accounts list + // Delete those accounts from the user storage + const userStorageAccountsToBeDeleted = userStorageAccountsList.filter( + (account) => + !refreshedInternalAccountsList.find((a) => a.address === account.a), ); + + if (userStorageAccountsToBeDeleted.length) { + await this.performBatchDeleteStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + userStorageAccountsToBeDeleted.map((account) => account.a), + ); + this.#config?.accountSyncing?.onAccountSyncErroneousSituation?.( + profileId, + 'An account was present in the user storage accounts list but was not found in the internal accounts list after the sync', + ); + } + + // We do this here and not in the finally statement because we want to make sure that + // the accounts are saved / updated / deleted at least once before we set this flag + await this.setHasAccountSyncingSyncedAtLeastOnce(true); } catch (e) { const errorMessage = e instanceof Error ? 
e.message : JSON.stringify(e); throw new Error( diff --git a/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockAccounts.ts b/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockAccounts.ts index 1c4623b2d0..5103853d44 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockAccounts.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockAccounts.ts @@ -153,6 +153,18 @@ export const MOCK_USER_STORAGE_ACCOUNTS = { ONE: mapInternalAccountsListToUserStorageAccountsList( MOCK_INTERNAL_ACCOUNTS.ONE as InternalAccount[], ), + TWO_DEFAULT_NAMES_WITH_ONE_BOGUS: + mapInternalAccountsListToUserStorageAccountsList([ + ...MOCK_INTERNAL_ACCOUNTS.ONE_DEFAULT_NAME, + { + ...MOCK_INTERNAL_ACCOUNTS.ONE_DEFAULT_NAME[0], + address: '0x000000', + metadata: { + name: `${getMockRandomDefaultAccountName()} 1`, + nameLastUpdatedAt: 2, + }, + }, + ] as InternalAccount[]), ONE_DEFAULT_NAME: mapInternalAccountsListToUserStorageAccountsList( MOCK_INTERNAL_ACCOUNTS.ONE_DEFAULT_NAME as InternalAccount[], ), diff --git a/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockResponses.ts b/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockResponses.ts index a3149f8dbe..218eb9a997 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockResponses.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockResponses.ts @@ -2,7 +2,10 @@ import type { UserStoragePathWithFeatureAndKey, UserStoragePathWithFeatureOnly, } from '../../../shared/storage-schema'; -import { createEntryPath } from '../../../shared/storage-schema'; +import { + createEntryPath, + USER_STORAGE_FEATURE_NAMES, +} from '../../../shared/storage-schema'; import type { GetUserStorageAllFeatureEntriesResponse, GetUserStorageResponse, @@ -72,7 +75,7 @@ export async function createMockAllFeatureEntriesResponse( * @returns mock GET API request. Can be used by e2e or unit mock servers */ export async function getMockUserStorageGetResponse( - path: UserStoragePathWithFeatureAndKey = 'notifications.notification_settings', + path: UserStoragePathWithFeatureAndKey = `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ) { return { url: getMockUserStorageEndpoint(path), @@ -88,7 +91,7 @@ export async function getMockUserStorageGetResponse( * @returns mock GET ALL API request. 
Can be used by e2e or unit mock servers */ export async function getMockUserStorageAllFeatureEntriesResponse( - path: UserStoragePathWithFeatureOnly = 'notifications', + path: UserStoragePathWithFeatureOnly = USER_STORAGE_FEATURE_NAMES.notifications, dataArr?: string[], ) { return { @@ -99,7 +102,7 @@ export async function getMockUserStorageAllFeatureEntriesResponse( } export const getMockUserStoragePutResponse = ( - path: UserStoragePathWithFeatureAndKey = 'notifications.notification_settings', + path: UserStoragePathWithFeatureAndKey = `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ) => { return { url: getMockUserStorageEndpoint(path), @@ -109,6 +112,16 @@ export const getMockUserStoragePutResponse = ( }; export const getMockUserStorageBatchPutResponse = ( + path: UserStoragePathWithFeatureOnly = USER_STORAGE_FEATURE_NAMES.notifications, +) => { + return { + url: getMockUserStorageEndpoint(path), + requestMethod: 'PUT', + response: null, + } satisfies MockResponse; +}; + +export const getMockUserStorageBatchDeleteResponse = ( path: UserStoragePathWithFeatureOnly = 'notifications', ) => { return { @@ -119,7 +132,7 @@ export const getMockUserStorageBatchPutResponse = ( }; export const deleteMockUserStorageResponse = ( - path: UserStoragePathWithFeatureAndKey = 'notifications.notification_settings', + path: UserStoragePathWithFeatureAndKey = `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ) => { return { url: getMockUserStorageEndpoint(path), @@ -129,7 +142,7 @@ export const deleteMockUserStorageResponse = ( }; export const deleteMockUserStorageAllFeatureEntriesResponse = ( - path: UserStoragePathWithFeatureOnly = 'notifications', + path: UserStoragePathWithFeatureOnly = USER_STORAGE_FEATURE_NAMES.notifications, ) => { return { url: getMockUserStorageEndpoint(path), diff --git a/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockServices.ts b/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockServices.ts index f5f12a1b7e..8f018271a9 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockServices.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/__fixtures__/mockServices.ts @@ -1,14 +1,16 @@ import nock from 'nock'; -import type { - UserStoragePathWithFeatureAndKey, - UserStoragePathWithFeatureOnly, +import { + USER_STORAGE_FEATURE_NAMES, + type UserStoragePathWithFeatureAndKey, + type UserStoragePathWithFeatureOnly, } from '../../../shared/storage-schema'; import { getMockUserStorageGetResponse, getMockUserStoragePutResponse, getMockUserStorageAllFeatureEntriesResponse, getMockUserStorageBatchPutResponse, + getMockUserStorageBatchDeleteResponse, deleteMockUserStorageAllFeatureEntriesResponse, deleteMockUserStorageResponse, } from './mockResponses'; @@ -19,7 +21,7 @@ type MockReply = { }; export const mockEndpointGetUserStorageAllFeatureEntries = async ( - path: UserStoragePathWithFeatureOnly = 'notifications', + path: UserStoragePathWithFeatureOnly = USER_STORAGE_FEATURE_NAMES.notifications, mockReply?: MockReply, ) => { const mockResponse = await getMockUserStorageAllFeatureEntriesResponse(path); @@ -36,7 +38,7 @@ export const mockEndpointGetUserStorageAllFeatureEntries = async ( }; export const mockEndpointGetUserStorage = async ( - path: UserStoragePathWithFeatureAndKey = 'notifications.notification_settings', + path: UserStoragePathWithFeatureAndKey = `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, mockReply?: 
MockReply, ) => { const mockResponse = await getMockUserStorageGetResponse(path); @@ -53,7 +55,7 @@ export const mockEndpointGetUserStorage = async ( }; export const mockEndpointUpsertUserStorage = ( - path: UserStoragePathWithFeatureAndKey = 'notifications.notification_settings', + path: UserStoragePathWithFeatureAndKey = `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, mockReply?: Pick, expectCallback?: (requestBody: nock.Body) => Promise, ) => { @@ -67,7 +69,7 @@ export const mockEndpointUpsertUserStorage = ( }; export const mockEndpointBatchUpsertUserStorage = ( - path: UserStoragePathWithFeatureOnly = 'notifications', + path: UserStoragePathWithFeatureOnly = USER_STORAGE_FEATURE_NAMES.notifications, mockReply?: Pick, callback?: (uri: string, requestBody: nock.Body) => Promise, ) => { @@ -81,7 +83,7 @@ export const mockEndpointBatchUpsertUserStorage = ( }; export const mockEndpointDeleteUserStorage = ( - path: UserStoragePathWithFeatureAndKey = 'notifications.notification_settings', + path: UserStoragePathWithFeatureAndKey = `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, mockReply?: MockReply, ) => { const mockResponse = deleteMockUserStorageResponse(path); @@ -95,7 +97,7 @@ export const mockEndpointDeleteUserStorage = ( }; export const mockEndpointDeleteUserStorageAllFeatureEntries = ( - path: UserStoragePathWithFeatureOnly = 'notifications', + path: UserStoragePathWithFeatureOnly = USER_STORAGE_FEATURE_NAMES.notifications, mockReply?: MockReply, ) => { const mockResponse = deleteMockUserStorageAllFeatureEntriesResponse(path); @@ -107,3 +109,17 @@ export const mockEndpointDeleteUserStorageAllFeatureEntries = ( return mockEndpoint; }; + +export const mockEndpointBatchDeleteUserStorage = ( + path: UserStoragePathWithFeatureOnly = 'notifications', + mockReply?: Pick, + callback?: (uri: string, requestBody: nock.Body) => Promise, +) => { + const mockResponse = getMockUserStorageBatchDeleteResponse(path); + const mockEndpoint = nock(mockResponse.url) + .put('') + .reply(mockReply?.status ?? 
204, async (uri, requestBody) => { + return await callback?.(uri, requestBody); + }); + return mockEndpoint; +}; diff --git a/packages/profile-sync-controller/src/controllers/user-storage/index.ts b/packages/profile-sync-controller/src/controllers/user-storage/index.ts index 074e73da40..24a74f5c89 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/index.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/index.ts @@ -6,3 +6,4 @@ export default UserStorageController; export * from './UserStorageController'; export * as Mocks from './__fixtures__'; export * from '../../shared/encryption'; +export * from '../../shared/storage-schema'; diff --git a/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.test.ts b/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.test.ts index 69cb8c6c14..b9915116ca 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.test.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.test.ts @@ -1,3 +1,4 @@ +import { USER_STORAGE_FEATURE_NAMES } from '../../../shared/storage-schema'; import { MOCK_STORAGE_KEY, createMockAllFeatureEntriesResponse, @@ -44,7 +45,7 @@ describe('network-syncing/services - getAllRemoteNetworks()', () => { return { mockGetAllAPI: await mockEndpointGetUserStorageAllFeatureEntries( - 'networks', + USER_STORAGE_FEATURE_NAMES.networks, payload, ), }; @@ -101,7 +102,9 @@ describe('network-syncing/services - upsertRemoteNetwork()', () => { return { storageOps: storageOpts, mockNetwork, - mockUpsertAPI: mockEndpointUpsertUserStorage('networks.0x1337'), + mockUpsertAPI: mockEndpointUpsertUserStorage( + `${USER_STORAGE_FEATURE_NAMES.networks}.0x1337`, + ), }; }; diff --git a/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.ts b/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.ts index e464d55463..2dab5e89af 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/services.ts @@ -1,3 +1,4 @@ +import { USER_STORAGE_FEATURE_NAMES } from '../../../shared/storage-schema'; import type { UserStorageBaseOptions } from '../services'; import { getUserStorageAllFeatureEntries, @@ -35,7 +36,7 @@ export async function getAllRemoteNetworks( const rawResults = (await getUserStorageAllFeatureEntries({ ...opts, - path: 'networks', + path: USER_STORAGE_FEATURE_NAMES.networks, })) ?? 
[]; const results = rawResults diff --git a/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/sync-mutations.test.ts b/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/sync-mutations.test.ts index 53f877f8d9..c40101fec2 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/sync-mutations.test.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/network-syncing/sync-mutations.test.ts @@ -1,5 +1,6 @@ import type { NetworkConfiguration } from '@metamask/network-controller'; +import { USER_STORAGE_FEATURE_NAMES } from '../../../shared/storage-schema'; import { MOCK_STORAGE_KEY } from '../__fixtures__'; import { mockEndpointUpsertUserStorage } from '../__fixtures__/mockServices'; import type { UserStorageBaseOptions } from '../services'; @@ -37,7 +38,9 @@ const testMatrix = [ describe('network-syncing/sync - updateNetwork() / addNetwork() / deleteNetwork()', () => { it.each(testMatrix)('should successfully call $fnName', async ({ act }) => { const mockNetwork = arrangeMockNetwork(); - const mockUpsertAPI = mockEndpointUpsertUserStorage('networks.0x1337'); + const mockUpsertAPI = mockEndpointUpsertUserStorage( + `${USER_STORAGE_FEATURE_NAMES.networks}.0x1337`, + ); await act(mockNetwork); expect(mockUpsertAPI.isDone()).toBe(true); }); @@ -46,9 +49,12 @@ describe('network-syncing/sync - updateNetwork() / addNetwork() / deleteNetwork( 'should throw error when calling $fnName when API fails', async ({ act }) => { const mockNetwork = arrangeMockNetwork(); - const mockUpsertAPI = mockEndpointUpsertUserStorage('networks.0x1337', { - status: 500, - }); + const mockUpsertAPI = mockEndpointUpsertUserStorage( + `${USER_STORAGE_FEATURE_NAMES.networks}.0x1337`, + { + status: 500, + }, + ); await expect(async () => await act(mockNetwork)).rejects.toThrow( expect.any(Error), ); diff --git a/packages/profile-sync-controller/src/controllers/user-storage/services.test.ts b/packages/profile-sync-controller/src/controllers/user-storage/services.test.ts index ef14dc2c7c..7fda37ed15 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/services.test.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/services.test.ts @@ -1,11 +1,12 @@ -import type { UserStoragePathWithKeyOnly } from 'src/shared/storage-schema'; - import encryption, { createSHA256Hash } from '../../shared/encryption'; +import type { UserStorageFeatureKeys } from '../../shared/storage-schema'; +import { USER_STORAGE_FEATURE_NAMES } from '../../shared/storage-schema'; import { mockEndpointGetUserStorage, mockEndpointUpsertUserStorage, mockEndpointGetUserStorageAllFeatureEntries, mockEndpointBatchUpsertUserStorage, + mockEndpointBatchDeleteUserStorage, mockEndpointDeleteUserStorageAllFeatureEntries, mockEndpointDeleteUserStorage, } from './__fixtures__/mockServices'; @@ -16,6 +17,7 @@ import { import type { GetUserStorageResponse } from './services'; import { batchUpsertUserStorage, + batchDeleteUserStorage, getUserStorage, getUserStorageAllFeatureEntries, upsertUserStorage, @@ -27,7 +29,7 @@ describe('user-storage/services.ts - getUserStorage() tests', () => { const actCallGetUserStorage = async () => { return await getUserStorage({ bearerToken: 'MOCK_BEARER_TOKEN', - path: 'notifications.notification_settings', + path: `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, storageKey: MOCK_STORAGE_KEY, }); }; @@ -42,7 +44,7 @@ describe('user-storage/services.ts - getUserStorage() tests', 
() => { it('returns null if endpoint does not have entry', async () => { const mockGetUserStorage = await mockEndpointGetUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 404 }, ); const result = await actCallGetUserStorage(); @@ -53,7 +55,7 @@ describe('user-storage/services.ts - getUserStorage() tests', () => { it('returns null if endpoint fails', async () => { const mockGetUserStorage = await mockEndpointGetUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 500 }, ); const result = await actCallGetUserStorage(); @@ -68,7 +70,7 @@ describe('user-storage/services.ts - getUserStorage() tests', () => { Data: 'Bad Encrypted Data', }; const mockGetUserStorage = await mockEndpointGetUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 200, body: badResponseData, @@ -85,14 +87,16 @@ describe('user-storage/services.ts - getUserStorageAllFeatureEntries() tests', ( const actCallGetUserStorageAllFeatureEntries = async () => { return await getUserStorageAllFeatureEntries({ bearerToken: 'MOCK_BEARER_TOKEN', - path: 'notifications', + path: USER_STORAGE_FEATURE_NAMES.notifications, storageKey: MOCK_STORAGE_KEY, }); }; it('returns user storage data', async () => { const mockGetUserStorageAllFeatureEntries = - await mockEndpointGetUserStorageAllFeatureEntries('notifications'); + await mockEndpointGetUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + ); const result = await actCallGetUserStorageAllFeatureEntries(); mockGetUserStorageAllFeatureEntries.done(); @@ -101,9 +105,12 @@ describe('user-storage/services.ts - getUserStorageAllFeatureEntries() tests', ( it('returns null if endpoint does not have entry', async () => { const mockGetUserStorage = - await mockEndpointGetUserStorageAllFeatureEntries('notifications', { - status: 404, - }); + await mockEndpointGetUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + { + status: 404, + }, + ); const result = await actCallGetUserStorageAllFeatureEntries(); mockGetUserStorage.done(); @@ -112,9 +119,12 @@ describe('user-storage/services.ts - getUserStorageAllFeatureEntries() tests', ( it('returns null if endpoint fails', async () => { const mockGetUserStorage = - await mockEndpointGetUserStorageAllFeatureEntries('notifications', { - status: 500, - }); + await mockEndpointGetUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + { + status: 500, + }, + ); const result = await actCallGetUserStorageAllFeatureEntries(); mockGetUserStorage.done(); @@ -127,10 +137,13 @@ describe('user-storage/services.ts - getUserStorageAllFeatureEntries() tests', ( Data: 'Bad Encrypted Data', }; const mockGetUserStorage = - await mockEndpointGetUserStorageAllFeatureEntries('notifications', { - status: 200, - body: badResponseData, - }); + await mockEndpointGetUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.notifications, + { + status: 200, + body: badResponseData, + }, + ); const result = await actCallGetUserStorageAllFeatureEntries(); mockGetUserStorage.done(); @@ -142,14 +155,14 @@ describe('user-storage/services.ts - upsertUserStorage() tests', () => { const actCallUpsertUserStorage = async () => { return await upsertUserStorage(MOCK_STORAGE_DATA, { bearerToken: 'MOCK_BEARER_TOKEN', - path: 'notifications.notification_settings', + path: 
`${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, storageKey: MOCK_STORAGE_KEY, }); }; it('invokes upsert endpoint with no errors', async () => { const mockUpsertUserStorage = mockEndpointUpsertUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, undefined, async (requestBody) => { if (typeof requestBody === 'string') { @@ -172,7 +185,7 @@ describe('user-storage/services.ts - upsertUserStorage() tests', () => { it('throws error if unable to upsert user storage', async () => { const mockUpsertUserStorage = mockEndpointUpsertUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 500, }, @@ -184,7 +197,10 @@ describe('user-storage/services.ts - upsertUserStorage() tests', () => { }); describe('user-storage/services.ts - batchUpsertUserStorage() tests', () => { - const dataToStore: [UserStoragePathWithKeyOnly, string][] = [ + const dataToStore: [ + UserStorageFeatureKeys, + string, + ][] = [ ['0x123', MOCK_STORAGE_DATA], ['0x456', MOCK_STORAGE_DATA], ]; @@ -192,14 +208,14 @@ describe('user-storage/services.ts - batchUpsertUserStorage() tests', () => { const actCallBatchUpsertUserStorage = async () => { return await batchUpsertUserStorage(dataToStore, { bearerToken: 'MOCK_BEARER_TOKEN', - path: 'accounts', + path: USER_STORAGE_FEATURE_NAMES.accounts, storageKey: MOCK_STORAGE_KEY, }); }; it('invokes upsert endpoint with no errors', async () => { const mockUpsertUserStorage = mockEndpointBatchUpsertUserStorage( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, undefined, async (_uri, requestBody) => { if (typeof requestBody === 'string') { @@ -233,7 +249,7 @@ describe('user-storage/services.ts - batchUpsertUserStorage() tests', () => { it('throws error if unable to upsert user storage', async () => { const mockUpsertUserStorage = mockEndpointBatchUpsertUserStorage( - 'accounts', + USER_STORAGE_FEATURE_NAMES.accounts, { status: 500, }, @@ -249,7 +265,7 @@ describe('user-storage/services.ts - batchUpsertUserStorage() tests', () => { describe('user-storage/services.ts - deleteUserStorage() tests', () => { const actCallDeleteUserStorage = async () => { return await deleteUserStorage({ - path: 'notifications.notification_settings', + path: `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, bearerToken: 'MOCK_BEARER_TOKEN', storageKey: MOCK_STORAGE_KEY, }); @@ -257,7 +273,7 @@ describe('user-storage/services.ts - deleteUserStorage() tests', () => { it('invokes delete endpoint with no errors', async () => { const mockDeleteUserStorage = mockEndpointDeleteUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ); await actCallDeleteUserStorage(); @@ -267,7 +283,7 @@ describe('user-storage/services.ts - deleteUserStorage() tests', () => { it('throws error if unable to delete user storage', async () => { const mockDeleteUserStorage = mockEndpointDeleteUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 500 }, ); @@ -277,7 +293,7 @@ describe('user-storage/services.ts - deleteUserStorage() tests', () => { it('throws error if feature not found', async () => { const mockDeleteUserStorage = mockEndpointDeleteUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 404 }, ); @@ -289,7 +305,7 @@ 
describe('user-storage/services.ts - deleteUserStorage() tests', () => { it('throws error if unable to get user storage', async () => { const mockDeleteUserStorage = mockEndpointDeleteUserStorage( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, { status: 400 }, ); @@ -304,14 +320,17 @@ describe('user-storage/services.ts - deleteUserStorageAllFeatureEntries() tests' const actCallDeleteUserStorageAllFeatureEntries = async () => { return await deleteUserStorageAllFeatureEntries({ bearerToken: 'MOCK_BEARER_TOKEN', - path: 'accounts', + path: USER_STORAGE_FEATURE_NAMES.accounts, storageKey: MOCK_STORAGE_KEY, }); }; it('invokes delete endpoint with no errors', async () => { const mockDeleteUserStorage = - mockEndpointDeleteUserStorageAllFeatureEntries('accounts', undefined); + mockEndpointDeleteUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + undefined, + ); await actCallDeleteUserStorageAllFeatureEntries(); @@ -320,9 +339,12 @@ describe('user-storage/services.ts - deleteUserStorageAllFeatureEntries() tests' it('throws error if unable to delete user storage', async () => { const mockDeleteUserStorage = - mockEndpointDeleteUserStorageAllFeatureEntries('accounts', { - status: 500, - }); + mockEndpointDeleteUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + { + status: 500, + }, + ); await expect(actCallDeleteUserStorageAllFeatureEntries()).rejects.toThrow( expect.any(Error), @@ -332,9 +354,12 @@ describe('user-storage/services.ts - deleteUserStorageAllFeatureEntries() tests' it('throws error if feature not found', async () => { const mockDeleteUserStorage = - mockEndpointDeleteUserStorageAllFeatureEntries('accounts', { - status: 404, - }); + mockEndpointDeleteUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + { + status: 404, + }, + ); await expect(actCallDeleteUserStorageAllFeatureEntries()).rejects.toThrow( 'user-storage - feature not found', @@ -344,9 +369,12 @@ describe('user-storage/services.ts - deleteUserStorageAllFeatureEntries() tests' it('throws error if unable to get user storage', async () => { const mockDeleteUserStorage = - mockEndpointDeleteUserStorageAllFeatureEntries('accounts', { - status: 400, - }); + mockEndpointDeleteUserStorageAllFeatureEntries( + USER_STORAGE_FEATURE_NAMES.accounts, + { + status: 400, + }, + ); await expect(actCallDeleteUserStorageAllFeatureEntries()).rejects.toThrow( 'user-storage - unable to delete data', @@ -354,3 +382,53 @@ describe('user-storage/services.ts - deleteUserStorageAllFeatureEntries() tests' mockDeleteUserStorage.done(); }); }); + +describe('user-storage/services.ts - batchDeleteUserStorage() tests', () => { + const keysToDelete: UserStorageFeatureKeys< + typeof USER_STORAGE_FEATURE_NAMES.accounts + >[] = ['0x123', '0x456']; + + const actCallBatchDeleteUserStorage = async () => { + return await batchDeleteUserStorage(keysToDelete, { + bearerToken: 'MOCK_BEARER_TOKEN', + path: USER_STORAGE_FEATURE_NAMES.accounts, + storageKey: MOCK_STORAGE_KEY, + }); + }; + + it('invokes upsert endpoint with no errors', async () => { + const mockDeleteUserStorage = mockEndpointBatchDeleteUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + undefined, + async (_uri, requestBody) => { + if (typeof requestBody === 'string') { + return; + } + + const expectedBody = keysToDelete.map((entryKey: string) => + createSHA256Hash(String(entryKey) + MOCK_STORAGE_KEY), + ); + + expect(requestBody.batch_delete).toStrictEqual(expectedBody); + }, + ); 
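      // The callback above mirrors what the service sends over the wire: each key in
      // `keysToDelete` is hashed together with the storage key, so the endpoint only ever
      // receives `batch_delete: [sha256(key + storageKey), ...]` and never the raw keys.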
+ + await actCallBatchDeleteUserStorage(); + + expect(mockDeleteUserStorage.isDone()).toBe(true); + }); + + it('throws error if unable to upsert user storage', async () => { + const mockDeleteUserStorage = mockEndpointBatchDeleteUserStorage( + USER_STORAGE_FEATURE_NAMES.accounts, + { + status: 500, + }, + ); + + await expect(actCallBatchDeleteUserStorage()).rejects.toThrow( + expect.any(Error), + ); + mockDeleteUserStorage.done(); + }); +}); diff --git a/packages/profile-sync-controller/src/controllers/user-storage/services.ts b/packages/profile-sync-controller/src/controllers/user-storage/services.ts index 1345ec2034..6680d8ca73 100644 --- a/packages/profile-sync-controller/src/controllers/user-storage/services.ts +++ b/packages/profile-sync-controller/src/controllers/user-storage/services.ts @@ -5,7 +5,6 @@ import { Env, getEnvUrls } from '../../shared/env'; import type { UserStoragePathWithFeatureAndKey, UserStoragePathWithFeatureOnly, - UserStoragePathWithKeyOnly, } from '../../shared/storage-schema'; import { createEntryPath } from '../../shared/storage-schema'; import type { NativeScrypt } from '../../shared/types/encryption'; @@ -204,7 +203,7 @@ export async function upsertUserStorage( * @param opts - storage options */ export async function batchUpsertUserStorage( - data: [UserStoragePathWithKeyOnly, string][], + data: [string, string][], opts: UserStorageBatchUpsertOptions, ): Promise { if (!data.length) { @@ -269,6 +268,46 @@ export async function deleteUserStorage( } } +/** + * User Storage Service - Delete multiple storage entries for one specific feature. + * You cannot use this method to delete multiple features at once. + * + * @param data - data to delete, in the form of an array entryKey[] + * @param opts - storage options + */ +export async function batchDeleteUserStorage( + data: string[], + opts: UserStorageBatchUpsertOptions, +): Promise { + if (!data.length) { + return; + } + + const { bearerToken, path, storageKey } = opts; + + const encryptedData: string[] = []; + + for (const d of data) { + encryptedData.push(createSHA256Hash(d + storageKey)); + } + + const url = new URL(`${USER_STORAGE_ENDPOINT}/${path}`); + + const res = await fetch(url.toString(), { + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${bearerToken}`, + }, + // eslint-disable-next-line @typescript-eslint/naming-convention + body: JSON.stringify({ batch_delete: encryptedData }), + }); + + if (!res.ok) { + throw new Error('user-storage - unable to batch delete data'); + } +} + /** * User Storage Service - Delete all storage entries for a specific feature. 
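 * Unlike `batchDeleteUserStorage` above, which removes only a chosen set of entry keys,
 * this clears every entry stored under the feature.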
* diff --git a/packages/profile-sync-controller/src/sdk/__fixtures__/mock-userstorage.ts b/packages/profile-sync-controller/src/sdk/__fixtures__/mock-userstorage.ts index af861e8bae..6b37dc41d0 100644 --- a/packages/profile-sync-controller/src/sdk/__fixtures__/mock-userstorage.ts +++ b/packages/profile-sync-controller/src/sdk/__fixtures__/mock-userstorage.ts @@ -2,6 +2,7 @@ import nock from 'nock'; import encryption from '../../shared/encryption'; import { Env } from '../../shared/env'; +import { USER_STORAGE_FEATURE_NAMES } from '../../shared/storage-schema'; import { STORAGE_URL } from '../user-storage'; type MockReply = { @@ -12,11 +13,11 @@ type MockReply = { // Example mock notifications storage entry (wildcard) const MOCK_STORAGE_URL = STORAGE_URL( Env.DEV, - 'notifications/notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ); const MOCK_STORAGE_URL_ALL_FEATURE_ENTRIES = STORAGE_URL( Env.DEV, - 'notifications', + USER_STORAGE_FEATURE_NAMES.notifications, ); export const MOCK_STORAGE_KEY = 'MOCK_STORAGE_KEY'; @@ -77,6 +78,21 @@ export const handleMockUserStoragePut = ( return mockEndpoint; }; +export const handleMockUserStorageBatchDelete = ( + mockReply?: MockReply, + callback?: (uri: string, requestBody: nock.Body) => Promise, +) => { + const reply = mockReply ?? { status: 204 }; + const mockEndpoint = nock(MOCK_STORAGE_URL) + .persist() + .put(/.*/u) + .reply(reply.status, async (uri, requestBody) => { + return await callback?.(uri, requestBody); + }); + + return mockEndpoint; +}; + export const handleMockUserStorageDelete = async (mockReply?: MockReply) => { const reply = mockReply ?? { status: 204 }; const mockEndpoint = nock(MOCK_STORAGE_URL) diff --git a/packages/profile-sync-controller/src/sdk/index.ts b/packages/profile-sync-controller/src/sdk/index.ts index e9daa6213e..21bedd22a7 100644 --- a/packages/profile-sync-controller/src/sdk/index.ts +++ b/packages/profile-sync-controller/src/sdk/index.ts @@ -4,3 +4,4 @@ export * from './errors'; export * from './utils/messaging-signing-snap-requests'; export * from '../shared/encryption'; export * from '../shared/env'; +export * from '../shared/storage-schema'; diff --git a/packages/profile-sync-controller/src/sdk/user-storage.test.ts b/packages/profile-sync-controller/src/sdk/user-storage.test.ts index 9eb303beab..7782d4c239 100644 --- a/packages/profile-sync-controller/src/sdk/user-storage.test.ts +++ b/packages/profile-sync-controller/src/sdk/user-storage.test.ts @@ -1,7 +1,7 @@ -import type { UserStoragePathWithKeyOnly } from 'src/shared/storage-schema'; - import encryption, { createSHA256Hash } from '../shared/encryption'; import { Env } from '../shared/env'; +import type { UserStorageFeatureKeys } from '../shared/storage-schema'; +import { USER_STORAGE_FEATURE_NAMES } from '../shared/storage-schema'; import { arrangeAuthAPIs } from './__fixtures__/mock-auth'; import { MOCK_NOTIFICATIONS_DATA, @@ -11,6 +11,7 @@ import { handleMockUserStorageGetAllFeatureEntries, handleMockUserStorageDeleteAllFeatureEntries, handleMockUserStorageDelete, + handleMockUserStorageBatchDelete, } from './__fixtures__/mock-userstorage'; import { arrangeAuth, typedMockFn } from './__fixtures__/test-utils'; import type { IBaseAuth } from './authentication-jwt-bearer/types'; @@ -40,13 +41,16 @@ describe('User Storage', () => { // Test Set const data = JSON.stringify(MOCK_NOTIFICATIONS_DATA); - await userStorage.setItem('notifications.notification_settings', data); + await userStorage.setItem( + 
`${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + data, + ); expect(mockPut.isDone()).toBe(true); expect(mockGet.isDone()).toBe(false); // Test Get (we expect the mocked encrypted data to be decrypt-able with the given Mock Storage Key) const response = await userStorage.getItem( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ); expect(mockGet.isDone()).toBe(true); expect(response).toBe(data); @@ -68,13 +72,16 @@ describe('User Storage', () => { // Test Set const data = JSON.stringify(MOCK_NOTIFICATIONS_DATA); - await userStorage.setItem('notifications.notification_settings', data); + await userStorage.setItem( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + data, + ); expect(mockPut.isDone()).toBe(true); expect(mockGet.isDone()).toBe(false); // Test Get (we expect the mocked encrypted data to be decrypt-able with the given Mock Storage Key) const response = await userStorage.getItem( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, ); expect(mockGet.isDone()).toBe(true); expect(response).toBe(data); @@ -88,14 +95,17 @@ describe('User Storage', () => { const data = JSON.stringify(MOCK_NOTIFICATIONS_DATA); const responseAllFeatureEntries = await userStorage.getAllFeatureItems( - 'notifications', + USER_STORAGE_FEATURE_NAMES.notifications, ); expect(mockGetAll.isDone()).toBe(true); expect(responseAllFeatureEntries).toStrictEqual([data]); }); it('batch set items', async () => { - const dataToStore: [UserStoragePathWithKeyOnly, string][] = [ + const dataToStore: [ + UserStorageFeatureKeys, + string, + ][] = [ ['0x123', JSON.stringify(MOCK_NOTIFICATIONS_DATA)], ['0x456', JSON.stringify(MOCK_NOTIFICATIONS_DATA)], ]; @@ -130,7 +140,10 @@ describe('User Storage', () => { }, ); - await userStorage.batchSetItems('accounts', dataToStore); + await userStorage.batchSetItems( + USER_STORAGE_FEATURE_NAMES.accounts, + dataToStore, + ); expect(mockPut.isDone()).toBe(true); }); @@ -140,7 +153,9 @@ describe('User Storage', () => { const mockDelete = await handleMockUserStorageDelete(); - await userStorage.deleteItem('notifications.notification_settings'); + await userStorage.deleteItem( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ); expect(mockDelete.isDone()).toBe(true); }); @@ -157,7 +172,9 @@ describe('User Storage', () => { }); await expect( - userStorage.deleteItem('notifications.notification_settings'), + userStorage.deleteItem( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(UserStorageError); }); @@ -167,7 +184,9 @@ describe('User Storage', () => { const mockDelete = await handleMockUserStorageDeleteAllFeatureEntries(); - await userStorage.deleteAllFeatureItems('notifications'); + await userStorage.deleteAllFeatureItems( + USER_STORAGE_FEATURE_NAMES.notifications, + ); expect(mockDelete.isDone()).toBe(true); }); @@ -184,10 +203,38 @@ describe('User Storage', () => { }); await expect( - userStorage.deleteAllFeatureItems('notifications'), + userStorage.deleteAllFeatureItems( + USER_STORAGE_FEATURE_NAMES.notifications, + ), ).rejects.toThrow(UserStorageError); }); + it('user storage: batch delete items', async () => { + const keysToDelete: UserStorageFeatureKeys< + typeof USER_STORAGE_FEATURE_NAMES.accounts + >[] = ['0x123', '0x456']; + const { auth } = arrangeAuth('SRP', MOCK_SRP); + const { userStorage } = arrangeUserStorage(auth); + + const mockPut = 
handleMockUserStorageBatchDelete( + undefined, + async (_, requestBody) => { + if (typeof requestBody === 'string') { + return; + } + + const expectedBody = keysToDelete.map((entryKey) => + createSHA256Hash(String(entryKey) + MOCK_STORAGE_KEY), + ); + + expect(requestBody.batch_delete).toStrictEqual(expectedBody); + }, + ); + + await userStorage.batchDeleteItems('accounts_v2', keysToDelete); + expect(mockPut.isDone()).toBe(true); + }); + it('user storage: failed to set key', async () => { const { auth } = arrangeAuth('SRP', MOCK_SRP); const { userStorage } = arrangeUserStorage(auth); @@ -202,7 +249,10 @@ describe('User Storage', () => { const data = JSON.stringify(MOCK_NOTIFICATIONS_DATA); await expect( - userStorage.setItem('notifications.notification_settings', data), + userStorage.setItem( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + data, + ), ).rejects.toThrow(UserStorageError); }); @@ -219,7 +269,29 @@ describe('User Storage', () => { }); await expect( - userStorage.batchSetItems('notifications', [['key', 'value']]), + userStorage.batchSetItems(USER_STORAGE_FEATURE_NAMES.notifications, [ + ['notification_settings', 'value'], + ]), + ).rejects.toThrow(UserStorageError); + }); + + it('user storage: failed to batch delete items', async () => { + const { auth } = arrangeAuth('SRP', MOCK_SRP); + const { userStorage } = arrangeUserStorage(auth); + + handleMockUserStorageBatchDelete({ + status: 401, + body: { + message: 'failed to insert storage entries', + error: 'generic-error', + }, + }); + + await expect( + userStorage.batchDeleteItems(USER_STORAGE_FEATURE_NAMES.accounts, [ + 'key', + 'key2', + ]), ).rejects.toThrow(UserStorageError); }); @@ -236,7 +308,9 @@ describe('User Storage', () => { }); await expect( - userStorage.getItem('notifications.notification_settings'), + userStorage.getItem( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(UserStorageError); }); @@ -253,7 +327,7 @@ describe('User Storage', () => { }); await expect( - userStorage.getAllFeatureItems('notifications'), + userStorage.getAllFeatureItems(USER_STORAGE_FEATURE_NAMES.notifications), ).rejects.toThrow(UserStorageError); }); @@ -270,7 +344,9 @@ describe('User Storage', () => { }); await expect( - userStorage.getItem('notifications.notification_settings'), + userStorage.getItem( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ), ).rejects.toThrow(NotFoundError); }); @@ -285,7 +361,7 @@ describe('User Storage', () => { handleMockUserStoragePut(); await userStorage.setItem( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, 'some fake data', ); expect(mockAuthSignMessage).toHaveBeenCalled(); // SignMessage called since generating new key diff --git a/packages/profile-sync-controller/src/sdk/user-storage.ts b/packages/profile-sync-controller/src/sdk/user-storage.ts index d527982f76..9376e90641 100644 --- a/packages/profile-sync-controller/src/sdk/user-storage.ts +++ b/packages/profile-sync-controller/src/sdk/user-storage.ts @@ -2,9 +2,10 @@ import encryption, { createSHA256Hash } from '../shared/encryption'; import type { Env } from '../shared/env'; import { getEnvUrls } from '../shared/env'; import type { + UserStorageFeatureKeys, + UserStorageFeatureNames, UserStoragePathWithFeatureAndKey, UserStoragePathWithFeatureOnly, - UserStoragePathWithKeyOnly, } from '../shared/storage-schema'; import { createEntryPath } from '../shared/storage-schema'; import type { 
IBaseAuth } from './authentication-jwt-bearer/types'; @@ -59,9 +60,9 @@ export class UserStorage { await this.#upsertUserStorage(path, value); } - async batchSetItems( - path: UserStoragePathWithFeatureOnly, - values: [UserStoragePathWithKeyOnly, string][], + async batchSetItems<FeatureName extends UserStorageFeatureNames>( + path: FeatureName, + values: [UserStorageFeatureKeys<FeatureName>, string][], ) { await this.#batchUpsertUserStorage(path, values); } @@ -86,6 +87,13 @@ export class UserStorage { return this.#deleteUserStorageAllFeatureEntries(path); } + async batchDeleteItems<FeatureName extends UserStorageFeatureNames>( + path: FeatureName, + values: UserStorageFeatureKeys<FeatureName>[], + ) { + return this.#batchDeleteUserStorage(path, values); + } + async getStorageKey(): Promise<string> { const storageKey = await this.options.storage?.getStorageKey(); if (storageKey) { @@ -141,9 +149,9 @@ } } - async #batchUpsertUserStorage( - path: UserStoragePathWithFeatureOnly, - data: [UserStoragePathWithKeyOnly, string][], + async #batchUpsertUserStorage<FeatureName extends UserStorageFeatureNames>( + path: FeatureName, + data: [UserStorageFeatureKeys<FeatureName>, string][], ): Promise<void> { try { if (!data.length) { @@ -385,6 +393,53 @@ } } + async #batchDeleteUserStorage<FeatureName extends UserStorageFeatureNames>( + path: FeatureName, + data: UserStorageFeatureKeys<FeatureName>[], + ): Promise<void> { + try { + if (!data.length) { + return; + } + + const headers = await this.#getAuthorizationHeader(); + const storageKey = await this.getStorageKey(); + + const encryptedData = await Promise.all( + data.map(async (d) => this.#createEntryKey(d, storageKey)), + ); + + const url = new URL(STORAGE_URL(this.env, path)); + + const response = await fetch(url.toString(), { + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + ...headers, + }, + // eslint-disable-next-line @typescript-eslint/naming-convention + body: JSON.stringify({ batch_delete: encryptedData }), + }); + + if (!response.ok) { + const responseBody: ErrorMessage = await response.json().catch(() => ({ + message: 'unknown', + error: 'unknown', + })); + throw new Error( + `HTTP error message: ${responseBody.message}, error: ${responseBody.error}`, + ); + } + } catch (e) { + /* istanbul ignore next */ + const errorMessage = + e instanceof Error ? e.message : JSON.stringify(e ?? ''); + throw new UserStorageError( + `failed to batch delete user storage for path '${path}'. 
${errorMessage}`, + ); + } + } + #createEntryKey(key: string, storageKey: string): string { const hashedKey = createSHA256Hash(key + storageKey); return hashedKey; diff --git a/packages/profile-sync-controller/src/shared/storage-schema.test.ts b/packages/profile-sync-controller/src/shared/storage-schema.test.ts index 744f74388a..95e096779e 100644 --- a/packages/profile-sync-controller/src/shared/storage-schema.test.ts +++ b/packages/profile-sync-controller/src/shared/storage-schema.test.ts @@ -2,6 +2,7 @@ import { createEntryPath, getFeatureAndKeyFromPath, USER_STORAGE_SCHEMA, + USER_STORAGE_FEATURE_NAMES, } from './storage-schema'; // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -12,11 +13,11 @@ describe('user-storage/schema.ts', () => { it('should correctly construct user storage url', () => { expect( createEntryPath( - 'notifications.notification_settings', + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, 'dbdc994804e591f7bef6695e525543712358dd5c952bd257560b629887972588', ), ).toBe( - 'notifications/94739860a3472f61e0802706abbbbf7c8d843f8ec0ad0bef3964e52fb9b72132', + `${USER_STORAGE_FEATURE_NAMES.notifications}/94739860a3472f61e0802706abbbbf7c8d843f8ec0ad0bef3964e52fb9b72132`, ); }); @@ -37,7 +38,7 @@ describe('user-storage/schema.ts', () => { }); it('should throw error if key is invalid', () => { - const feature = 'notifications'; + const feature = USER_STORAGE_FEATURE_NAMES.notifications; const path = `${feature}.invalid`; const validKeys = USER_STORAGE_SCHEMA[feature].join(', '); @@ -49,19 +50,21 @@ describe('user-storage/schema.ts', () => { }); it('should return feature and key from path', () => { - const path = 'notifications.notification_settings'; - const result = getFeatureAndKeyFromPath(path); + const result = getFeatureAndKeyFromPath( + `${USER_STORAGE_FEATURE_NAMES.notifications}.notification_settings`, + ); expect(result).toStrictEqual({ - feature: 'notifications', + feature: USER_STORAGE_FEATURE_NAMES.notifications, key: 'notification_settings', }); }); it('should return feature and key from path with arbitrary key', () => { - const path = 'accounts.0x123'; - const result = getFeatureAndKeyFromPath(path); + const result = getFeatureAndKeyFromPath( + `${USER_STORAGE_FEATURE_NAMES.accounts}.0x123`, + ); expect(result).toStrictEqual({ - feature: 'accounts', + feature: USER_STORAGE_FEATURE_NAMES.accounts, key: '0x123', }); }); diff --git a/packages/profile-sync-controller/src/shared/storage-schema.ts b/packages/profile-sync-controller/src/shared/storage-schema.ts index 888ef93160..5ebc2a2c73 100644 --- a/packages/profile-sync-controller/src/shared/storage-schema.ts +++ b/packages/profile-sync-controller/src/shared/storage-schema.ts @@ -9,31 +9,36 @@ import { createSHA256Hash } from './encryption'; */ const ALLOW_ARBITRARY_KEYS = 'ALLOW_ARBITRARY_KEYS' as const; +export const USER_STORAGE_FEATURE_NAMES = { + notifications: 'notifications', + accounts: 'accounts_v2', + networks: 'networks', +} as const; + +export type UserStorageFeatureNames = + (typeof USER_STORAGE_FEATURE_NAMES)[keyof typeof USER_STORAGE_FEATURE_NAMES]; + export const USER_STORAGE_SCHEMA = { - notifications: ['notification_settings'], - accounts: [ALLOW_ARBITRARY_KEYS], // keyed by account addresses - networks: [ALLOW_ARBITRARY_KEYS], // keyed by chains/networks + [USER_STORAGE_FEATURE_NAMES.notifications]: ['notification_settings'], + [USER_STORAGE_FEATURE_NAMES.accounts]: [ALLOW_ARBITRARY_KEYS], // keyed by account addresses + [USER_STORAGE_FEATURE_NAMES.networks]: 
[ALLOW_ARBITRARY_KEYS], // keyed by chains/networks } as const; type UserStorageSchema = typeof USER_STORAGE_SCHEMA; -export type UserStorageFeatures = keyof UserStorageSchema; -export type UserStorageFeatureKeys<Feature extends UserStorageFeatures> = +export type UserStorageFeatureKeys<Feature extends UserStorageFeatureNames> = UserStorageSchema[Feature][0] extends typeof ALLOW_ARBITRARY_KEYS ? string : UserStorageSchema[Feature][number]; type UserStorageFeatureAndKey = { - feature: UserStorageFeatures; - key: UserStorageFeatureKeys<UserStorageFeatures>; + feature: UserStorageFeatureNames; + key: UserStorageFeatureKeys<UserStorageFeatureNames>; }; -export type UserStoragePathWithFeatureOnly = keyof UserStorageSchema; -export type UserStoragePathWithKeyOnly = { - [K in UserStorageFeatures]: `${UserStorageFeatureKeys<K>}`; -}[UserStoragePathWithFeatureOnly]; +export type UserStoragePathWithFeatureOnly = UserStorageFeatureNames; export type UserStoragePathWithFeatureAndKey = { - [K in UserStorageFeatures]: `${K}.${UserStorageFeatureKeys<K>}`; + [K in UserStorageFeatureNames]: `${K}.${UserStorageFeatureKeys<K>}`; }[UserStoragePathWithFeatureOnly]; export const getFeatureAndKeyFromPath = ( @@ -48,8 +53,8 @@ } const [feature, key] = path.split('.') as [ - UserStorageFeatures, - UserStorageFeatureKeys<UserStorageFeatures>, + UserStorageFeatureNames, + UserStorageFeatureKeys<UserStorageFeatureNames>, ]; if (!(feature in USER_STORAGE_SCHEMA)) { diff --git a/packages/queued-request-controller/src/QueuedRequestMiddleware.test.ts b/packages/queued-request-controller/src/QueuedRequestMiddleware.test.ts index 6af151aae0..80738262d3 100644 --- a/packages/queued-request-controller/src/QueuedRequestMiddleware.test.ts +++ b/packages/queued-request-controller/src/QueuedRequestMiddleware.test.ts @@ -86,7 +86,6 @@ describe('createQueuedRequestMiddleware', () => { const mockEnqueueRequest = getMockEnqueueRequest(); const middleware = buildQueuedRequestMiddleware({ enqueueRequest: mockEnqueueRequest, - useRequestQueue: () => true, }); const request = { @@ -105,7 +104,7 @@ describe('createQueuedRequestMiddleware', () => { const mockEnqueueRequest = getMockEnqueueRequest(); const middleware = buildQueuedRequestMiddleware({ enqueueRequest: mockEnqueueRequest, - useRequestQueue: () => true, + shouldEnqueueRequest: ({ method }) => method === 'method_with_confirmation', }); @@ -145,7 +144,6 @@ describe('createQueuedRequestMiddleware', () => { it('calls next after a request is queued and processed', async () => { const middleware = buildQueuedRequestMiddleware({ enqueueRequest: getMockEnqueueRequest(), - useRequestQueue: () => true, }); const request = { ...getRequestDefaults(), @@ -167,7 +165,7 @@ describe('createQueuedRequestMiddleware', () => { enqueueRequest: jest .fn() .mockRejectedValue(new Error('enqueuing error')), - useRequestQueue: () => true, + shouldEnqueueRequest: () => true, }); const request = { @@ -191,7 +189,7 @@ describe('createQueuedRequestMiddleware', () => { enqueueRequest: jest .fn() .mockRejectedValue(new Error('enqueuing error')), - useRequestQueue: () => true, + shouldEnqueueRequest: () => true, }); const request = { @@ -271,7 +269,6 @@ function buildQueuedRequestMiddleware( ) { const options = { enqueueRequest: getMockEnqueueRequest(), - useRequestQueue: () => false, shouldEnqueueRequest: () => false, ...overrideOptions, }; diff --git a/packages/queued-request-controller/src/QueuedRequestMiddleware.ts b/packages/queued-request-controller/src/QueuedRequestMiddleware.ts index 5edecf787e..5b52fb649b 100644 --- a/packages/queued-request-controller/src/QueuedRequestMiddleware.ts +++ 
b/packages/queued-request-controller/src/QueuedRequestMiddleware.ts @@ -38,17 +38,14 @@ function hasRequiredMetadata( * * @param options - Configuration options. * @param options.enqueueRequest - A method for enqueueing a request. - * @param options.useRequestQueue - A function that determines if the request queue feature is enabled. * @param options.shouldEnqueueRequest - A function that returns if a request should be handled by the QueuedRequestController. * @returns The JSON-RPC middleware that manages queued requests. */ export const createQueuedRequestMiddleware = ({ enqueueRequest, - useRequestQueue, shouldEnqueueRequest, }: { enqueueRequest: QueuedRequestController['enqueueRequest']; - useRequestQueue: () => boolean; shouldEnqueueRequest: ( request: QueuedRequestMiddlewareJsonRpcRequest, ) => boolean; @@ -56,9 +53,8 @@ export const createQueuedRequestMiddleware = ({ return createAsyncMiddleware(async (req: JsonRpcRequest, res, next) => { hasRequiredMetadata(req); - // if the request queue feature is turned off, or this method is not a confirmation method - // bypass the queue completely - if (!useRequestQueue() || !shouldEnqueueRequest(req)) { + // if this method is not a confirmation method bypass the queue completely + if (!shouldEnqueueRequest(req)) { return await next(); } diff --git a/packages/selected-network-controller/src/SelectedNetworkController.ts b/packages/selected-network-controller/src/SelectedNetworkController.ts index 8f73418122..ab87c541ca 100644 --- a/packages/selected-network-controller/src/SelectedNetworkController.ts +++ b/packages/selected-network-controller/src/SelectedNetworkController.ts @@ -102,10 +102,6 @@ export type SelectedNetworkControllerMessenger = RestrictedControllerMessenger< export type SelectedNetworkControllerOptions = { state?: SelectedNetworkControllerState; messenger: SelectedNetworkControllerMessenger; - useRequestQueuePreference: boolean; - onPreferencesStateChange: ( - listener: (preferencesState: { useRequestQueue: boolean }) => void, - ) => void; domainProxyMap: Map; }; @@ -124,23 +120,17 @@ export class SelectedNetworkController extends BaseController< > { #domainProxyMap: Map; - #useRequestQueuePreference: boolean; - /** * Construct a SelectedNetworkController controller. * * @param options - The controller options. * @param options.messenger - The restricted controller messenger for the EncryptionPublicKey controller. * @param options.state - The controllers initial state. - * @param options.useRequestQueuePreference - A boolean indicating whether to use the request queue preference. - * @param options.onPreferencesStateChange - A callback that is called when the preference state changes. * @param options.domainProxyMap - A map for storing domain-specific proxies that are held in memory only during use. 
*/ constructor({ messenger, state = getDefaultState(), - useRequestQueuePreference, - onPreferencesStateChange, domainProxyMap, }: SelectedNetworkControllerOptions) { super({ @@ -149,7 +139,6 @@ export class SelectedNetworkController extends BaseController< messenger, state, }); - this.#useRequestQueuePreference = useRequestQueuePreference; this.#domainProxyMap = domainProxyMap; this.#registerMessageHandlers(); @@ -247,21 +236,6 @@ export class SelectedNetworkController extends BaseController< } }, ); - - onPreferencesStateChange(({ useRequestQueue }) => { - if (this.#useRequestQueuePreference !== useRequestQueue) { - if (!useRequestQueue) { - // Loop through all domains and points each domain's proxy - // to the NetworkController's own proxy of the globally selected networkClient - Object.keys(this.state.domains).forEach((domain) => { - this.#unsetNetworkClientIdForDomain(domain); - }); - } else { - this.#resetAllPermissionedDomains(); - } - this.#useRequestQueuePreference = useRequestQueue; - } - }); } #registerMessageHandlers(): void { @@ -326,31 +300,10 @@ export class SelectedNetworkController extends BaseController< ); } - // Loop through all domains and for those with permissions it points that domain's proxy - // to an unproxied instance of the globally selected network client. - // NOT the NetworkController's proxy of the globally selected networkClient - #resetAllPermissionedDomains() { - this.#domainProxyMap.forEach((_: NetworkProxy, domain: string) => { - const { selectedNetworkClientId } = this.messagingSystem.call( - 'NetworkController:getState', - ); - // can't use public setNetworkClientIdForDomain because it will throw an error - // rather than simply skip if the domain doesn't have permissions which can happen - // in this case since proxies are added for each site the user visits - if (this.#domainHasPermissions(domain)) { - this.#setNetworkClientIdForDomain(domain, selectedNetworkClientId); - } - }); - } - setNetworkClientIdForDomain( domain: Domain, networkClientId: NetworkClientId, ) { - if (!this.#useRequestQueuePreference) { - return; - } - if (domain === METAMASK_DOMAIN) { throw new Error( `NetworkClientId for domain "${METAMASK_DOMAIN}" cannot be set on the SelectedNetworkController`, @@ -373,9 +326,6 @@ export class SelectedNetworkController extends BaseController< getNetworkClientIdForDomain(domain: Domain): NetworkClientId { const { selectedNetworkClientId: metamaskSelectedNetworkClientId } = this.messagingSystem.call('NetworkController:getState'); - if (!this.#useRequestQueuePreference) { - return metamaskSelectedNetworkClientId; - } return this.state.domains[domain] ?? 
metamaskSelectedNetworkClientId; } @@ -403,10 +353,7 @@ export class SelectedNetworkController extends BaseController< let networkProxy = this.#domainProxyMap.get(domain); if (networkProxy === undefined) { let networkClient; - if ( - this.#useRequestQueuePreference && - this.#domainHasPermissions(domain) - ) { + if (this.#domainHasPermissions(domain)) { const networkClientId = this.getNetworkClientIdForDomain(domain); networkClient = this.messagingSystem.call( 'NetworkController:getNetworkClientById', @@ -416,10 +363,11 @@ export class SelectedNetworkController extends BaseController< networkClient = this.messagingSystem.call( 'NetworkController:getSelectedNetworkClient', ); - if (networkClient === undefined) { - throw new Error('Selected network not initialized'); - } } + if (networkClient === undefined) { + throw new Error('Selected network not initialized'); + } + networkProxy = { provider: createEventEmitterProxy(networkClient.provider), blockTracker: createEventEmitterProxy(networkClient.blockTracker, { diff --git a/packages/selected-network-controller/tests/SelectedNetworkController.test.ts b/packages/selected-network-controller/tests/SelectedNetworkController.test.ts index fd975016ca..dcc01ae78d 100644 --- a/packages/selected-network-controller/tests/SelectedNetworkController.test.ts +++ b/packages/selected-network-controller/tests/SelectedNetworkController.test.ts @@ -121,15 +121,10 @@ jest.mock('@metamask/swappable-obj-proxy'); const setup = ({ getSubjectNames = [], state, - useRequestQueuePreference = false, domainProxyMap = new Map(), }: { state?: SelectedNetworkControllerState; getSubjectNames?: string[]; - useRequestQueuePreference?: boolean; - onPreferencesStateChange?: ( - listener: (preferencesState: { useRequestQueue: boolean }) => void, - ) => void; domainProxyMap?: Map; } = {}) => { const mockProviderProxy = { @@ -173,34 +168,18 @@ const setup = ({ getSubjectNames, }); - const preferencesStateChangeListeners: ((state: { - useRequestQueue: boolean; - }) => void)[] = []; const controller = new SelectedNetworkController({ messenger: restrictedMessenger, state, - useRequestQueuePreference, - onPreferencesStateChange: (listener) => { - preferencesStateChangeListeners.push(listener); - }, domainProxyMap, }); - const triggerPreferencesStateChange = (preferencesState: { - useRequestQueue: boolean; - }) => { - for (const listener of preferencesStateChangeListeners) { - listener(preferencesState); - } - }; - return { controller, messenger, mockProviderProxy, mockBlockTrackerProxy, domainProxyMap, - triggerPreferencesStateChange, createEventEmitterProxyMock, ...mockMessengerActions, }; @@ -226,296 +205,232 @@ describe('SelectedNetworkController', () => { }); }); - describe('when useRequestQueuePreference is true', () => { - it('should set networkClientId for domains not already in state', async () => { - const { controller } = setup({ - state: { - domains: { - 'existingdomain.com': 'initialNetworkId', - }, + it('should set networkClientId for domains not already in state', async () => { + const { controller } = setup({ + state: { + domains: { + 'existingdomain.com': 'initialNetworkId', }, - getSubjectNames: ['newdomain.com'], - useRequestQueuePreference: true, - }); - - expect(controller.state.domains).toStrictEqual({ - 'newdomain.com': 'mainnet', - 'existingdomain.com': 'initialNetworkId', - }); + }, + getSubjectNames: ['newdomain.com'], }); - it('should not modify domains already in state', async () => { - const { controller } = setup({ - state: { - domains: { - 
'existingdomain.com': 'initialNetworkId', - }, - }, - getSubjectNames: ['existingdomain.com'], - useRequestQueuePreference: true, - }); - - expect(controller.state.domains).toStrictEqual({ - 'existingdomain.com': 'initialNetworkId', - }); + expect(controller.state.domains).toStrictEqual({ + 'newdomain.com': 'mainnet', + 'existingdomain.com': 'initialNetworkId', }); }); - describe('when useRequestQueuePreference is false', () => { - it('should not set networkClientId for new domains', async () => { - const { controller } = setup({ - state: { - domains: { - 'existingdomain.com': 'initialNetworkId', - }, + it('should not modify domains already in state', async () => { + const { controller } = setup({ + state: { + domains: { + 'existingdomain.com': 'initialNetworkId', }, - getSubjectNames: ['newdomain.com'], - }); - - expect(controller.state.domains).toStrictEqual({ - 'existingdomain.com': 'initialNetworkId', - }); + }, + getSubjectNames: ['existingdomain.com'], }); - it('should not modify domains already in state', async () => { - const { controller } = setup({ - state: { - domains: { - 'existingdomain.com': 'initialNetworkId', - }, - }, - getSubjectNames: ['existingdomain.com'], - }); - - expect(controller.state.domains).toStrictEqual({ - 'existingdomain.com': 'initialNetworkId', - }); + expect(controller.state.domains).toStrictEqual({ + 'existingdomain.com': 'initialNetworkId', }); }); - }); - - describe('NetworkController:stateChange', () => { - describe('when a network is deleted from the network controller', () => { - const initialDomains = { - 'not-deleted-network.com': 'linea-mainnet', - 'deleted-network.com': 'goerli', - }; - - const deleteNetwork = ( - chainId: Hex, - networkControllerState: NetworkState, - messenger: ReturnType, - mockNetworkControllerGetState: jest.Mock, - ) => { - delete networkControllerState.networkConfigurationsByChainId[chainId]; - mockNetworkControllerGetState.mockReturnValueOnce( - networkControllerState, - ); - messenger.publish( - 'NetworkController:stateChange', - networkControllerState, - [ - { - op: 'remove', - path: ['networkConfigurationsByChainId', chainId], - }, - ], - ); - }; - - it('does not update state when useRequestQueuePreference is false', () => { - const { controller, messenger, mockNetworkControllerGetState } = setup({ - state: { domains: initialDomains }, - useRequestQueuePreference: false, - }); - - const networkControllerState = getDefaultNetworkControllerState(); - deleteNetwork( - '0x5', - networkControllerState, - messenger, - mockNetworkControllerGetState, - ); - expect(controller.state.domains).toStrictEqual(initialDomains); - }); - - it('redirects domains to the globally selected network when useRequestQueuePreference is true', () => { - const { controller, messenger, mockNetworkControllerGetState } = setup({ - state: { domains: initialDomains }, - useRequestQueuePreference: true, - }); + describe('NetworkController:stateChange', () => { + describe('when a network is deleted from the network controller', () => { + const initialDomains = { + 'not-deleted-network.com': 'linea-mainnet', + 'deleted-network.com': 'goerli', + }; - const networkControllerState = { - ...getDefaultNetworkControllerState(), - selectedNetworkClientId: 'mainnet', + const deleteNetwork = ( + chainId: Hex, + networkControllerState: NetworkState, + messenger: ReturnType, + mockNetworkControllerGetState: jest.Mock, + ) => { + delete networkControllerState.networkConfigurationsByChainId[chainId]; + mockNetworkControllerGetState.mockReturnValueOnce( + 
networkControllerState, + ); + messenger.publish( + 'NetworkController:stateChange', + networkControllerState, + [ + { + op: 'remove', + path: ['networkConfigurationsByChainId', chainId], + }, + ], + ); }; - deleteNetwork( - '0x5', - networkControllerState, - messenger, - mockNetworkControllerGetState, - ); + it('redirects domains to the globally selected network', () => { + const { controller, messenger, mockNetworkControllerGetState } = + setup({ + state: { domains: initialDomains }, + }); - expect(controller.state.domains).toStrictEqual({ - ...initialDomains, - 'deleted-network.com': networkControllerState.selectedNetworkClientId, - }); - }); + const networkControllerState = { + ...getDefaultNetworkControllerState(), + selectedNetworkClientId: 'mainnet', + }; - it('redirects domains to the globally selected network when useRequestQueuePreference is true and handles garbage collected proxies', () => { - const domainProxyMap = new Map(); - const { - controller, - messenger, - mockNetworkControllerGetState, - mockGetNetworkClientById, - } = setup({ - state: { domains: initialDomains }, - useRequestQueuePreference: true, - domainProxyMap, + deleteNetwork( + '0x5', + networkControllerState, + messenger, + mockNetworkControllerGetState, + ); + + expect(controller.state.domains).toStrictEqual({ + ...initialDomains, + 'deleted-network.com': + networkControllerState.selectedNetworkClientId, + }); }); - // Simulate proxies being garbage collected - domainProxyMap.clear(); + it('redirects domains to the globally selected network and handles garbage collected proxies', () => { + const domainProxyMap = new Map(); + const { + controller, + messenger, + mockNetworkControllerGetState, + mockGetNetworkClientById, + } = setup({ + state: { domains: initialDomains }, - const networkControllerState = { - ...getDefaultNetworkControllerState(), - selectedNetworkClientId: 'mainnet', - }; + domainProxyMap, + }); - mockGetNetworkClientById.mockImplementation((id) => { - // Simulate the previous domain being deleted in NetworkController - if (id !== 'mainnet') { - throw new Error('Network client does not exist'); - } + // Simulate proxies being garbage collected + domainProxyMap.clear(); - return { - provider: { request: jest.fn() }, - blockTracker: { getLatestBlock: jest.fn() }, + const networkControllerState = { + ...getDefaultNetworkControllerState(), + selectedNetworkClientId: 'mainnet', }; - }); - deleteNetwork( - '0x5', - networkControllerState, - messenger, - mockNetworkControllerGetState, - ); + mockGetNetworkClientById.mockImplementation((id) => { + // Simulate the previous domain being deleted in NetworkController + if (id !== 'mainnet') { + throw new Error('Network client does not exist'); + } - expect(controller.state.domains).toStrictEqual({ - ...initialDomains, - 'deleted-network.com': networkControllerState.selectedNetworkClientId, - }); - }); - }); + return { + provider: { request: jest.fn() }, + blockTracker: { getLatestBlock: jest.fn() }, + }; + }); - describe('when a network is updated', () => { - it('redirects domains when the default rpc endpoint is switched', () => { - const initialDomains = { - 'different-chain.com': 'mainnet', - 'chain-with-new-default.com': 'goerli', - }; + deleteNetwork( + '0x5', + networkControllerState, + messenger, + mockNetworkControllerGetState, + ); - const { controller, messenger, mockNetworkControllerGetState } = setup({ - state: { domains: initialDomains }, - useRequestQueuePreference: true, + expect(controller.state.domains).toStrictEqual({ + ...initialDomains, 
+ 'deleted-network.com': + networkControllerState.selectedNetworkClientId, + }); }); + }); - const networkControllerState = getDefaultNetworkControllerState(); - const goerliNetwork = - networkControllerState.networkConfigurationsByChainId['0x5']; + describe('when a network is updated', () => { + it('redirects domains when the default rpc endpoint is switched', () => { + const initialDomains = { + 'different-chain.com': 'mainnet', + 'chain-with-new-default.com': 'goerli', + }; - goerliNetwork.defaultRpcEndpointIndex = - goerliNetwork.rpcEndpoints.push({ - type: RpcEndpointType.Custom, - url: 'https://new-default.com', - networkClientId: 'new-default-network-client-id', - }) - 1; + const { controller, messenger, mockNetworkControllerGetState } = + setup({ + state: { domains: initialDomains }, + }); - mockNetworkControllerGetState.mockReturnValueOnce( - networkControllerState, - ); + const networkControllerState = getDefaultNetworkControllerState(); + const goerliNetwork = + networkControllerState.networkConfigurationsByChainId['0x5']; - messenger.publish( - 'NetworkController:stateChange', - networkControllerState, - [ - { - op: 'replace', - path: ['networkConfigurationsByChainId', '0x5'], - }, - ], - ); + goerliNetwork.defaultRpcEndpointIndex = + goerliNetwork.rpcEndpoints.push({ + type: RpcEndpointType.Custom, + url: 'https://new-default.com', + networkClientId: 'new-default-network-client-id', + }) - 1; - expect(controller.state.domains).toStrictEqual({ - ...initialDomains, - 'chain-with-new-default.com': 'new-default-network-client-id', - }); - }); + mockNetworkControllerGetState.mockReturnValueOnce( + networkControllerState, + ); - it('redirects domains when the default rpc endpoint is deleted and replaced', () => { - const initialDomains = { - 'different-chain.com': 'mainnet', - 'chain-with-new-default.com': 'goerli', - }; + messenger.publish( + 'NetworkController:stateChange', + networkControllerState, + [ + { + op: 'replace', + path: ['networkConfigurationsByChainId', '0x5'], + }, + ], + ); - const { controller, messenger, mockNetworkControllerGetState } = setup({ - state: { domains: initialDomains }, - useRequestQueuePreference: true, + expect(controller.state.domains).toStrictEqual({ + ...initialDomains, + 'chain-with-new-default.com': 'new-default-network-client-id', + }); }); - const networkControllerState = getDefaultNetworkControllerState(); - const goerliNetwork = - networkControllerState.networkConfigurationsByChainId['0x5']; + it('redirects domains when the default rpc endpoint is deleted and replaced', () => { + const initialDomains = { + 'different-chain.com': 'mainnet', + 'chain-with-new-default.com': 'goerli', + }; - goerliNetwork.rpcEndpoints = [ - { - type: RpcEndpointType.Custom, - url: 'https://new-default.com', - networkClientId: 'new-default-network-client-id', - }, - ]; + const { controller, messenger, mockNetworkControllerGetState } = + setup({ + state: { domains: initialDomains }, + }); - mockNetworkControllerGetState.mockReturnValueOnce( - networkControllerState, - ); + const networkControllerState = getDefaultNetworkControllerState(); + const goerliNetwork = + networkControllerState.networkConfigurationsByChainId['0x5']; - messenger.publish( - 'NetworkController:stateChange', - networkControllerState, - [ + goerliNetwork.rpcEndpoints = [ { - op: 'replace', - path: ['networkConfigurationsByChainId', '0x5'], + type: RpcEndpointType.Custom, + url: 'https://new-default.com', + networkClientId: 'new-default-network-client-id', }, - ], - ); + ]; - 
expect(controller.state.domains).toStrictEqual({ - ...initialDomains, - 'chain-with-new-default.com': 'new-default-network-client-id', - }); - }); - }); - }); + mockNetworkControllerGetState.mockReturnValueOnce( + networkControllerState, + ); - describe('setNetworkClientIdForDomain', () => { - it('does not update state when the useRequestQueuePreference is false', () => { - const { controller } = setup({ - state: { - domains: {}, - }, - }); + messenger.publish( + 'NetworkController:stateChange', + networkControllerState, + [ + { + op: 'replace', + path: ['networkConfigurationsByChainId', '0x5'], + }, + ], + ); - controller.setNetworkClientIdForDomain('1.com', '1'); - expect(controller.state.domains).toStrictEqual({}); + expect(controller.state.domains).toStrictEqual({ + ...initialDomains, + 'chain-with-new-default.com': 'new-default-network-client-id', + }); + }); + }); }); - describe('when useRequestQueuePreference is true', () => { + describe('setNetworkClientIdForDomain', () => { it('should throw an error when passed "metamask" as domain arg', () => { - const { controller } = setup({ useRequestQueuePreference: true }); + const { controller } = setup(); expect(() => { controller.setNetworkClientIdForDomain('metamask', 'mainnet'); }).toThrow( @@ -528,7 +443,6 @@ describe('SelectedNetworkController', () => { it('skips setting the networkClientId for the passed in domain', () => { const { controller, mockHasPermissions } = setup({ state: { domains: {} }, - useRequestQueuePreference: true, }); mockHasPermissions.mockReturnValue(true); const snapDomainOne = 'npm:@metamask/bip32-example-snap'; @@ -559,7 +473,6 @@ describe('SelectedNetworkController', () => { it('sets the networkClientId for the passed in domain', () => { const { controller, mockHasPermissions } = setup({ state: { domains: {} }, - useRequestQueuePreference: true, }); mockHasPermissions.mockReturnValue(true); const domain = 'example.com'; @@ -571,7 +484,6 @@ describe('SelectedNetworkController', () => { it('updates the provider and block tracker proxy when they already exist for the domain', () => { const { controller, mockProviderProxy, mockHasPermissions } = setup({ state: { domains: {} }, - useRequestQueuePreference: true, }); mockHasPermissions.mockReturnValue(true); const initialNetworkClientId = '123'; @@ -603,7 +515,6 @@ describe('SelectedNetworkController', () => { it('throws an error and does not set the networkClientId for the passed in domain', () => { const { controller, mockHasPermissions } = setup({ state: { domains: {} }, - useRequestQueuePreference: true, }); mockHasPermissions.mockReturnValue(false); @@ -618,17 +529,8 @@ describe('SelectedNetworkController', () => { }); }); }); - }); - - describe('getNetworkClientIdForDomain', () => { - it('returns the selectedNetworkClientId from the NetworkController when useRequestQueuePreference is false', () => { - const { controller } = setup(); - expect(controller.getNetworkClientIdForDomain('example.com')).toBe( - 'mainnet', - ); - }); - describe('when useRequestQueuePreference is true', () => { + describe('getNetworkClientIdForDomain', () => { it('returns the networkClientId from state when a networkClientId has been set for the requested domain', () => { const { controller } = setup({ state: { @@ -636,7 +538,6 @@ describe('SelectedNetworkController', () => { 'example.com': '1', }, }, - useRequestQueuePreference: true, }); const result = controller.getNetworkClientIdForDomain('example.com'); @@ -646,480 +547,286 @@ describe('SelectedNetworkController', () => { 
it('returns the selectedNetworkClientId from the NetworkController when no networkClientId has been set for the requested domain', () => { const { controller } = setup({ state: { domains: {} }, - useRequestQueuePreference: true, }); expect(controller.getNetworkClientIdForDomain('example.com')).toBe( 'mainnet', ); }); }); - }); - - describe('getProviderAndBlockTracker', () => { - it('returns the cached proxy provider and block tracker when the domain already has a cached networkProxy in the domainProxyMap', () => { - const mockProxyProvider = { - setTarget: jest.fn(), - } as unknown as ProviderProxy; - const mockProxyBlockTracker = { - setTarget: jest.fn(), - } as unknown as BlockTrackerProxy; - - const domainProxyMap = new Map([ - [ - 'example.com', - { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, - }, - ], - [ - 'test.com', - { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, - }, - ], - ]); - const { controller } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: true, - domainProxyMap, - }); - - const result = controller.getProviderAndBlockTracker('example.com'); - expect(result).toStrictEqual({ - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, - }); - }); - - describe('when the domain does not have a cached networkProxy in the domainProxyMap and useRequestQueuePreference is true', () => { - describe('when the domain has permissions', () => { - it('calls to NetworkController:getNetworkClientById and creates a new proxy provider and block tracker with the non-proxied globally selected network client', () => { - const { controller, messenger, mockHasPermissions } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: true, - }); - jest.spyOn(messenger, 'call'); - mockHasPermissions.mockReturnValue(true); - const result = controller.getProviderAndBlockTracker('example.com'); - expect(result).toBeDefined(); - // unfortunately checking which networkController method is called is the best - // proxy (no pun intended) for checking that the correct instance of the networkClient is used - expect(messenger.call).toHaveBeenCalledWith( - 'NetworkController:getNetworkClientById', - 'mainnet', - ); - }); - }); + describe('getProviderAndBlockTracker', () => { + it('returns the cached proxy provider and block tracker when the domain already has a cached networkProxy in the domainProxyMap', () => { + const mockProxyProvider = { + setTarget: jest.fn(), + } as unknown as ProviderProxy; + const mockProxyBlockTracker = { + setTarget: jest.fn(), + } as unknown as BlockTrackerProxy; - describe('when the domain does not have permissions', () => { - it('calls to NetworkController:getSelectedNetworkClient and creates a new proxy provider and block tracker with the proxied globally selected network client', () => { - const { controller, messenger, mockHasPermissions } = setup({ - state: { - domains: {}, + const domainProxyMap = new Map([ + [ + 'example.com', + { + provider: mockProxyProvider, + blockTracker: mockProxyBlockTracker, }, - useRequestQueuePreference: true, - }); - jest.spyOn(messenger, 'call'); - mockHasPermissions.mockReturnValue(false); - const result = controller.getProviderAndBlockTracker('example.com'); - expect(result).toBeDefined(); - // unfortunately checking which networkController method is called is the best - // proxy (no pun intended) for checking that the correct instance of the networkClient is used - expect(messenger.call).toHaveBeenCalledWith( - 
'NetworkController:getSelectedNetworkClient', - ); - }); - - it('throws an error if the globally selected network client is not initialized', () => { - const { controller, mockGetSelectedNetworkClient } = setup({ - state: { - domains: {}, + ], + [ + 'test.com', + { + provider: mockProxyProvider, + blockTracker: mockProxyBlockTracker, }, - useRequestQueuePreference: false, - }); - mockGetSelectedNetworkClient.mockReturnValue(undefined); - expect(() => - controller.getProviderAndBlockTracker('example.com'), - ).toThrow('Selected network not initialized'); - }); - }); - }); - - describe('when the domain does not have a cached networkProxy in the domainProxyMap and useRequestQueuePreference is false', () => { - it('calls to NetworkController:getSelectedNetworkClient and creates a new proxy provider and block tracker with the proxied globally selected network client', () => { - const { controller, messenger } = setup({ + ], + ]); + const { controller } = setup({ state: { domains: {}, }, - useRequestQueuePreference: false, + + domainProxyMap, }); - jest.spyOn(messenger, 'call'); const result = controller.getProviderAndBlockTracker('example.com'); - expect(result).toBeDefined(); - // unfortunately checking which networkController method is called is the best - // proxy (no pun intended) for checking that the correct instance of the networkClient is used - expect(messenger.call).toHaveBeenCalledWith( - 'NetworkController:getSelectedNetworkClient', - ); - }); - }); - - // TODO - improve these tests by using a full NetworkController and doing more robust behavioral testing - describe('when the domain is a snap (starts with "npm:" or "local:")', () => { - it('returns a proxied globally selected networkClient and does not create a new proxy in the domainProxyMap', () => { - const { controller, domainProxyMap, messenger } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: true, + expect(result).toStrictEqual({ + provider: mockProxyProvider, + blockTracker: mockProxyBlockTracker, }); - jest.spyOn(messenger, 'call'); - const snapDomain = 'npm:@metamask/bip32-example-snap'; - - const result = controller.getProviderAndBlockTracker(snapDomain); - - expect(domainProxyMap.get(snapDomain)).toBeUndefined(); - expect(messenger.call).toHaveBeenCalledWith( - 'NetworkController:getSelectedNetworkClient', - ); - expect(result).toBeDefined(); }); - it('throws an error if the globally selected network client is not initialized', () => { - const { controller, mockGetSelectedNetworkClient } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: false, - }); - const snapDomain = 'npm:@metamask/bip32-example-snap'; + it('throws an error if passed a domain that does not have permissions and the globally selected network client is not initialized', () => { + const { controller, mockGetSelectedNetworkClient, mockHasPermissions } = + setup(); mockGetSelectedNetworkClient.mockReturnValue(undefined); - - expect(() => controller.getProviderAndBlockTracker(snapDomain)).toThrow( + mockHasPermissions.mockReturnValue(false); + expect(() => controller.getProviderAndBlockTracker('test.com')).toThrow( 'Selected network not initialized', ); }); - }); - - describe('when the domain is a "metamask"', () => { - it('returns a proxied globally selected networkClient and does not create a new proxy in the domainProxyMap', () => { - const { controller, domainProxyMap, messenger } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: true, - }); - jest.spyOn(messenger, 'call'); - const result = 
controller.getProviderAndBlockTracker(METAMASK_DOMAIN); - - expect(result).toBeDefined(); - expect(domainProxyMap.get(METAMASK_DOMAIN)).toBeUndefined(); - expect(messenger.call).toHaveBeenCalledWith( - 'NetworkController:getSelectedNetworkClient', + it('throws and error if passed a domain that has permissions and the globally selected network client is not initialized', () => { + const { controller, mockGetNetworkClientById, mockHasPermissions } = + setup(); + mockGetNetworkClientById.mockReturnValue(undefined); + mockHasPermissions.mockReturnValue(true); + expect(() => controller.getProviderAndBlockTracker('test.com')).toThrow( + 'Selected network not initialized', ); }); - it('throws an error if the globally selected network client is not initialized', () => { - const { controller, mockGetSelectedNetworkClient } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: false, + describe('when the domain does not have a cached networkProxy in the domainProxyMap', () => { + describe('when the domain has permissions', () => { + it('calls to NetworkController:getNetworkClientById and creates a new proxy provider and block tracker with the non-proxied globally selected network client', () => { + const { controller, messenger, mockHasPermissions } = setup({ + state: { + domains: {}, + }, + }); + jest.spyOn(messenger, 'call'); + mockHasPermissions.mockReturnValue(true); + + const result = controller.getProviderAndBlockTracker('example.com'); + expect(result).toBeDefined(); + // unfortunately checking which networkController method is called is the best + // proxy (no pun intended) for checking that the correct instance of the networkClient is used + expect(messenger.call).toHaveBeenCalledWith( + 'NetworkController:getNetworkClientById', + 'mainnet', + ); + }); }); - mockGetSelectedNetworkClient.mockReturnValue(undefined); - - expect(() => - controller.getProviderAndBlockTracker(METAMASK_DOMAIN), - ).toThrow('Selected network not initialized'); - }); - }); - }); - describe('PermissionController:stateChange', () => { - describe('on permission add', () => { - it('should add new domain to domains list when useRequestQueuePreference is true', async () => { - const { controller, messenger } = setup({ - useRequestQueuePreference: true, + describe('when the domain does not have permissions', () => { + it('calls to NetworkController:getSelectedNetworkClient and creates a new proxy provider and block tracker with the proxied globally selected network client', () => { + const { controller, messenger, mockHasPermissions } = setup({ + state: { + domains: {}, + }, + }); + jest.spyOn(messenger, 'call'); + mockHasPermissions.mockReturnValue(false); + const result = controller.getProviderAndBlockTracker('example.com'); + expect(result).toBeDefined(); + // unfortunately checking which networkController method is called is the best + // proxy (no pun intended) for checking that the correct instance of the networkClient is used + expect(messenger.call).toHaveBeenCalledWith( + 'NetworkController:getSelectedNetworkClient', + ); + }); }); - const mockPermission = { - parentCapability: 'eth_accounts', - id: 'example.com', - date: Date.now(), - caveats: [{ type: 'restrictToAccounts', value: ['0x...'] }], - }; - - messenger.publish( - 'PermissionController:stateChange', - { subjects: {} }, - [ - { - op: 'add', - path: ['subjects', 'example.com', 'permissions'], - value: mockPermission, - }, - ], - ); - - const { domains } = controller.state; - expect(domains['example.com']).toBeDefined(); }); - it('should 
not add new domain to domains list when useRequestQueuePreference is false', async () => { - const { controller, messenger } = setup({}); - const mockPermission = { - parentCapability: 'eth_accounts', - id: 'example.com', - date: Date.now(), - caveats: [{ type: 'restrictToAccounts', value: ['0x...'] }], - }; - - messenger.publish( - 'PermissionController:stateChange', - { subjects: {} }, - [ - { - op: 'add', - path: ['subjects', 'example.com', 'permissions'], - value: mockPermission, + // TODO - improve these tests by using a full NetworkController and doing more robust behavioral testing + describe('when the domain is a snap (starts with "npm:" or "local:")', () => { + it('returns a proxied globally selected networkClient and does not create a new proxy in the domainProxyMap', () => { + const { controller, domainProxyMap, messenger } = setup({ + state: { + domains: {}, }, - ], - ); + }); + jest.spyOn(messenger, 'call'); + const snapDomain = 'npm:@metamask/bip32-example-snap'; - const { domains } = controller.state; - expect(domains['example.com']).toBeUndefined(); - }); - }); + const result = controller.getProviderAndBlockTracker(snapDomain); - describe('on permission removal', () => { - it('should remove domain from domains list', async () => { - const { controller, messenger } = setup({ - state: { domains: { 'example.com': 'foo' } }, + expect(domainProxyMap.get(snapDomain)).toBeUndefined(); + expect(messenger.call).toHaveBeenCalledWith( + 'NetworkController:getSelectedNetworkClient', + ); + expect(result).toBeDefined(); }); - messenger.publish( - 'PermissionController:stateChange', - { subjects: {} }, - [ - { - op: 'remove', - path: ['subjects', 'example.com', 'permissions'], + it('throws an error if the globally selected network client is not initialized', () => { + const { controller, mockGetSelectedNetworkClient } = setup({ + state: { + domains: {}, }, - ], - ); - - const { domains } = controller.state; - expect(domains['example.com']).toBeUndefined(); - }); + }); + const snapDomain = 'npm:@metamask/bip32-example-snap'; + mockGetSelectedNetworkClient.mockReturnValue(undefined); - it('should set the proxy to the globally selected network if the globally selected network client is initialized and a proxy exists for the domain', async () => { - const { controller, messenger, mockProviderProxy } = setup({ - state: { domains: { 'example.com': 'foo' } }, + expect(() => + controller.getProviderAndBlockTracker(snapDomain), + ).toThrow('Selected network not initialized'); }); - controller.getProviderAndBlockTracker('example.com'); + }); - messenger.publish( - 'PermissionController:stateChange', - { subjects: {} }, - [ - { - op: 'remove', - path: ['subjects', 'example.com', 'permissions'], + describe('when the domain is a "metamask"', () => { + it('returns a proxied globally selected networkClient and does not create a new proxy in the domainProxyMap', () => { + const { controller, domainProxyMap, messenger } = setup({ + state: { + domains: {}, }, - ], - ); - - expect(mockProviderProxy.setTarget).toHaveBeenCalledWith( - expect.objectContaining({ request: expect.any(Function) }), - ); - expect(mockProviderProxy.setTarget).toHaveBeenCalledTimes(1); + }); + jest.spyOn(messenger, 'call'); - const { domains } = controller.state; - expect(domains['example.com']).toBeUndefined(); - }); + const result = controller.getProviderAndBlockTracker(METAMASK_DOMAIN); - it('should delete the proxy if the globally selected network client is not initialized but a proxy exists for the domain', async () => { - 
const { - controller, - messenger, - domainProxyMap, - mockProviderProxy, - mockGetSelectedNetworkClient, - } = setup({ - state: { domains: { 'example.com': 'foo' } }, + expect(result).toBeDefined(); + expect(domainProxyMap.get(METAMASK_DOMAIN)).toBeUndefined(); + expect(messenger.call).toHaveBeenCalledWith( + 'NetworkController:getSelectedNetworkClient', + ); }); - controller.getProviderAndBlockTracker('example.com'); - mockGetSelectedNetworkClient.mockReturnValue(undefined); - expect(domainProxyMap.get('example.com')).toBeDefined(); - messenger.publish( - 'PermissionController:stateChange', - { subjects: {} }, - [ - { - op: 'remove', - path: ['subjects', 'example.com', 'permissions'], + it('throws an error if the globally selected network client is not initialized', () => { + const { controller, mockGetSelectedNetworkClient } = setup({ + state: { + domains: {}, }, - ], - ); + }); + mockGetSelectedNetworkClient.mockReturnValue(undefined); - expect(mockProviderProxy.setTarget).toHaveBeenCalledTimes(0); - expect(domainProxyMap.get('example.com')).toBeUndefined(); + expect(() => + controller.getProviderAndBlockTracker(METAMASK_DOMAIN), + ).toThrow('Selected network not initialized'); + }); }); }); - }); - // because of the opacity of the networkClient and proxy implementations, - // its impossible to make valuable assertions around which networkClient proxies - // should be targeted when the useRequestQueuePreference state is toggled on and off: - // When toggled on, the networkClient for the globally selected networkClientId should be used - **not** the NetworkController's proxy of this networkClient. - // When toggled off, the NetworkControllers proxy of the globally selected networkClient should be used - // TODO - improve these tests by using a full NetworkController and doing more robust behavioral testing - describe('onPreferencesStateChange', () => { - const mockProxyProvider = { - setTarget: jest.fn(), - } as unknown as ProviderProxy; - const mockProxyBlockTracker = { - setTarget: jest.fn(), - } as unknown as BlockTrackerProxy; - - describe('when toggled from off to on', () => { - describe('when domains have permissions', () => { - it('sets the target of the existing proxies to the non-proxied networkClient for the globally selected networkClientId', () => { - const domainProxyMap = new Map([ - [ - 'example.com', - { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, - }, - ], + describe('PermissionController:stateChange', () => { + describe('on permission add', () => { + it('should add new domain to domains list', async () => { + const { controller, messenger } = setup({}); + const mockPermission = { + parentCapability: 'eth_accounts', + id: 'example.com', + date: Date.now(), + caveats: [{ type: 'restrictToAccounts', value: ['0x...'] }], + }; + + messenger.publish( + 'PermissionController:stateChange', + { subjects: {} }, [ - 'test.com', { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, + op: 'add', + path: ['subjects', 'example.com', 'permissions'], + value: mockPermission, }, ], - ]); - - const { - mockHasPermissions, - triggerPreferencesStateChange, - messenger, - } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: false, - domainProxyMap, - }); - jest.spyOn(messenger, 'call'); - - mockHasPermissions.mockReturnValue(true); - - triggerPreferencesStateChange({ useRequestQueue: true }); - - // this is a very imperfect way to test this, but networkClients and proxies are opaque - // when the proxy is set with the 
networkClient fetched via NetworkController:getNetworkClientById - // it **is not** tied to the NetworkController's own proxy of the networkClient - expect(messenger.call).toHaveBeenCalledWith( - 'NetworkController:getNetworkClientById', - 'mainnet', ); - expect(mockProxyProvider.setTarget).toHaveBeenCalledTimes(2); - expect(mockProxyBlockTracker.setTarget).toHaveBeenCalledTimes(2); + + const { domains } = controller.state; + expect(domains['example.com']).toBeDefined(); }); }); - describe('when domains do not have permissions', () => { - it('does not change the target of the existing proxy', () => { - const domainProxyMap = new Map([ + describe('on permission removal', () => { + it('should remove domain from domains list', async () => { + const { controller, messenger } = setup({ + state: { domains: { 'example.com': 'foo' } }, + }); + + messenger.publish( + 'PermissionController:stateChange', + { subjects: {} }, [ - 'example.com', { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, + op: 'remove', + path: ['subjects', 'example.com', 'permissions'], }, ], + ); + + const { domains } = controller.state; + expect(domains['example.com']).toBeUndefined(); + }); + + it('should set the proxy to the globally selected network if the globally selected network client is initialized and a proxy exists for the domain', async () => { + const { controller, messenger, mockProviderProxy } = setup({ + state: { domains: { 'example.com': 'foo' } }, + }); + controller.getProviderAndBlockTracker('example.com'); + + messenger.publish( + 'PermissionController:stateChange', + { subjects: {} }, [ - 'test.com', { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, + op: 'remove', + path: ['subjects', 'example.com', 'permissions'], }, ], - ]); - const { mockHasPermissions, triggerPreferencesStateChange } = setup({ - state: { - domains: {}, - }, - useRequestQueuePreference: false, - domainProxyMap, - }); - - mockHasPermissions.mockReturnValue(false); + ); - triggerPreferencesStateChange({ useRequestQueue: true }); + expect(mockProviderProxy.setTarget).toHaveBeenCalledWith( + expect.objectContaining({ request: expect.any(Function) }), + ); + expect(mockProviderProxy.setTarget).toHaveBeenCalledTimes(1); - expect(mockProxyProvider.setTarget).toHaveBeenCalledTimes(0); - expect(mockProxyBlockTracker.setTarget).toHaveBeenCalledTimes(0); + const { domains } = controller.state; + expect(domains['example.com']).toBeUndefined(); }); - }); - }); - describe('when toggled from on to off', () => { - it('sets the target of the existing proxies to the proxied globally selected networkClient', () => { - const domainProxyMap = new Map([ - [ - 'example.com', - { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, - }, - ], - [ - 'test.com', - { - provider: mockProxyProvider, - blockTracker: mockProxyBlockTracker, - }, - ], - ]); - - const { mockHasPermissions, triggerPreferencesStateChange, messenger } = - setup({ - state: { - domains: { - 'example.com': 'foo', - 'test.com': 'bar', - }, - }, - useRequestQueuePreference: true, + it('should delete the proxy if the globally selected network client is not initialized but a proxy exists for the domain', async () => { + const { + controller, + messenger, domainProxyMap, + mockProviderProxy, + mockGetSelectedNetworkClient, + } = setup({ + state: { domains: { 'example.com': 'foo' } }, }); - jest.spyOn(messenger, 'call'); - - mockHasPermissions.mockReturnValue(true); + controller.getProviderAndBlockTracker('example.com'); - 
triggerPreferencesStateChange({ useRequestQueue: false }); + mockGetSelectedNetworkClient.mockReturnValue(undefined); + expect(domainProxyMap.get('example.com')).toBeDefined(); + messenger.publish( + 'PermissionController:stateChange', + { subjects: {} }, + [ + { + op: 'remove', + path: ['subjects', 'example.com', 'permissions'], + }, + ], + ); - // this is a very imperfect way to test this, but networkClients and proxies are opaque - // when the proxy is set with the networkClient fetched via NetworkController:getSelectedNetworkClient - // it **is** tied to the NetworkController's own proxy of the networkClient - expect(messenger.call).toHaveBeenCalledWith( - 'NetworkController:getSelectedNetworkClient', - ); - expect(mockProxyProvider.setTarget).toHaveBeenCalledTimes(2); - expect(mockProxyBlockTracker.setTarget).toHaveBeenCalledTimes(2); + expect(mockProviderProxy.setTarget).toHaveBeenCalledTimes(0); + expect(domainProxyMap.get('example.com')).toBeUndefined(); + }); }); }); }); diff --git a/packages/signature-controller/CHANGELOG.md b/packages/signature-controller/CHANGELOG.md index e90718bed8..a20da03c52 100644 --- a/packages/signature-controller/CHANGELOG.md +++ b/packages/signature-controller/CHANGELOG.md @@ -7,6 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [23.0.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-controller` peer dependency from `^18.0.0` to `^19.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) + ## [22.0.0] ### Changed @@ -414,7 +420,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Initial release ([#1214](https://github.com/MetaMask/core/pull/1214)) -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/signature-controller@22.0.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/signature-controller@23.0.0...HEAD +[23.0.0]: https://github.com/MetaMask/core/compare/@metamask/signature-controller@22.0.0...@metamask/signature-controller@23.0.0 [22.0.0]: https://github.com/MetaMask/core/compare/@metamask/signature-controller@21.1.0...@metamask/signature-controller@22.0.0 [21.1.0]: https://github.com/MetaMask/core/compare/@metamask/signature-controller@21.0.0...@metamask/signature-controller@21.1.0 [21.0.0]: https://github.com/MetaMask/core/compare/@metamask/signature-controller@20.1.0...@metamask/signature-controller@21.0.0 diff --git a/packages/signature-controller/package.json b/packages/signature-controller/package.json index 7180728e34..67e8aec015 100644 --- a/packages/signature-controller/package.json +++ b/packages/signature-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/signature-controller", - "version": "22.0.0", + "version": "23.0.0", "description": "Processes signing requests in order to sign arbitrary and typed data", "keywords": [ "MetaMask", @@ -58,7 +58,7 @@ "devDependencies": { "@metamask/approval-controller": "^7.1.1", "@metamask/auto-changelog": "^3.4.4", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/logging-controller": "^6.0.2", "@metamask/network-controller": "^22.0.2", "@types/jest": "^27.4.1", @@ -71,7 +71,7 @@ }, "peerDependencies": { "@metamask/approval-controller": "^7.0.0", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/logging-controller": "^6.0.0", "@metamask/network-controller": "^22.0.0" }, diff --git a/packages/transaction-controller/CHANGELOG.md 
b/packages/transaction-controller/CHANGELOG.md index 1c3bd17fca..7b3c7fbf58 100644 --- a/packages/transaction-controller/CHANGELOG.md +++ b/packages/transaction-controller/CHANGELOG.md @@ -26,6 +26,28 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Remove the `blockTracker`, `isMultichainEnabled`, `onNetworkStateChange` and `provider` constructor options. - Remove `filterToCurrentNetwork` option from `getTransactions` method. +## [40.1.0] + +### Added + +- Add `firstTimeInteraction` to transaction meta ([#4895](https://github.com/MetaMask/core/pull/4895)) + - This is a boolean value that indicates whether the transaction is the first time the user has interacted with it. +- Add `isFirstTimeInteractionEnabled` callback constructor option ([#4895](https://github.com/MetaMask/core/pull/4895)) + - This is a function that returns a boolean value indicating whether the first time interaction check should be enabled. + +## [40.0.0] + +### Changed + +- **BREAKING:** Bump `@metamask/accounts-controller` peer dependency from `^19.0.0` to `^20.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) + +## [39.1.0] + +### Changed + +- Temporarily increase the pending transaction polling rate when polling starts ([#4917](https://github.com/MetaMask/core/pull/4917)) + - Poll every 3 seconds up to ten times, then poll on each new block. + ## [39.0.0] ### Changed @@ -1139,7 +1161,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 All changes listed after this point were applied to this package following the monorepo conversion. -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@39.0.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@40.1.0...HEAD +[40.1.0]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@40.0.0...@metamask/transaction-controller@40.1.0 +[40.0.0]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@39.1.0...@metamask/transaction-controller@40.0.0 +[39.1.0]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@39.0.0...@metamask/transaction-controller@39.1.0 [39.0.0]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@38.3.0...@metamask/transaction-controller@39.0.0 [38.3.0]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@38.2.0...@metamask/transaction-controller@38.3.0 [38.2.0]: https://github.com/MetaMask/core/compare/@metamask/transaction-controller@38.1.0...@metamask/transaction-controller@38.2.0 diff --git a/packages/transaction-controller/jest.config.js b/packages/transaction-controller/jest.config.js index 2fff8bcbab..c0e83e26c8 100644 --- a/packages/transaction-controller/jest.config.js +++ b/packages/transaction-controller/jest.config.js @@ -17,10 +17,10 @@ module.exports = merge(baseConfig, { // An object that configures minimum threshold enforcement for coverage results coverageThreshold: { global: { - branches: 93.41, - functions: 97.51, - lines: 97.51, - statements: 98.23, + branches: 93.23, + functions: 97.61, + lines: 98.26, + statements: 98.28, }, }, diff --git a/packages/transaction-controller/package.json b/packages/transaction-controller/package.json index 56b30af616..a87a19374c 100644 --- a/packages/transaction-controller/package.json +++ b/packages/transaction-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/transaction-controller", - "version": "39.0.0", + "version": "40.1.0", 
"description": "Stores transactions alongside their periodically updated statuses and manages interactions such as approval and cancellation", "keywords": [ "MetaMask", @@ -69,13 +69,12 @@ }, "devDependencies": { "@babel/runtime": "^7.23.9", - "@metamask/accounts-controller": "^19.0.0", + "@metamask/accounts-controller": "^20.0.0", "@metamask/approval-controller": "^7.1.1", "@metamask/auto-changelog": "^3.4.4", "@metamask/eth-json-rpc-provider": "^4.1.6", "@metamask/ethjs-provider-http": "^0.3.0", "@metamask/gas-fee-controller": "^22.0.1", - "@metamask/keyring-api": "^8.1.3", "@metamask/network-controller": "^22.0.2", "@types/bn.js": "^5.1.5", "@types/jest": "^27.4.1", @@ -92,7 +91,7 @@ }, "peerDependencies": { "@babel/runtime": "^7.23.9", - "@metamask/accounts-controller": "^19.0.0", + "@metamask/accounts-controller": "^20.0.0", "@metamask/approval-controller": "^7.0.0", "@metamask/gas-fee-controller": "^22.0.0", "@metamask/network-controller": "^22.0.0" diff --git a/packages/transaction-controller/src/TransactionController.test.ts b/packages/transaction-controller/src/TransactionController.test.ts index 1c9b83026d..07abe9c297 100644 --- a/packages/transaction-controller/src/TransactionController.test.ts +++ b/packages/transaction-controller/src/TransactionController.test.ts @@ -16,8 +16,6 @@ import { import type { SafeEventEmitterProvider } from '@metamask/eth-json-rpc-provider'; import EthQuery from '@metamask/eth-query'; import HttpProvider from '@metamask/ethjs-provider-http'; -import type { InternalAccount } from '@metamask/keyring-api'; -import { EthAccountType } from '@metamask/keyring-api'; import type { BlockTracker, NetworkClientConfiguration, @@ -42,6 +40,7 @@ import { buildCustomNetworkClientConfiguration, buildMockGetNetworkClientById, } from '../../network-controller/tests/helpers'; +import { getAccountAddressRelationship } from './api/accounts-api'; import { CHAIN_IDS } from './constants'; import { DefaultGasFeeFlow } from './gas-flows/DefaultGasFeeFlow'; import { LineaGasFeeFlow } from './gas-flows/LineaGasFeeFlow'; @@ -70,6 +69,7 @@ import type { GasFeeFlow, GasFeeFlowResponse, SubmitHistoryEntry, + InternalAccount, } from './types'; import { GasFeeEstimateType, @@ -102,6 +102,7 @@ const MOCK_V1_UUID = '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'; const TRANSACTION_HASH_MOCK = '0x123456'; jest.mock('@metamask/eth-query'); +jest.mock('./api/accounts-api'); jest.mock('./gas-flows/DefaultGasFeeFlow'); jest.mock('./gas-flows/LineaGasFeeFlow'); jest.mock('./gas-flows/TestGasFeeFlow'); @@ -382,10 +383,11 @@ const MOCK_LINEA_GOERLI_NETWORK: MockNetwork = { }; const ACCOUNT_MOCK = '0x6bf137f335ea1b8f193b8f6ea92561a60d23a207'; -const INTERNAL_ACCOUNT_MOCK = { + +const INTERNAL_ACCOUNT_MOCK: InternalAccount = { id: '58def058-d35f-49a1-a7ab-e2580565f6f5', address: ACCOUNT_MOCK, - type: EthAccountType.Eoa, + type: 'eip155:eoa', options: {}, methods: [], metadata: { @@ -479,6 +481,9 @@ describe('TransactionController', () => { ); const getGasFeeFlowMock = jest.mocked(getGasFeeFlow); const shouldResimulateMock = jest.mocked(shouldResimulate); + const getAccountAddressRelationshipMock = jest.mocked( + getAccountAddressRelationship, + ); const methodDataHelperClassMock = jest.mocked(MethodDataHelper); let mockEthQuery: EthQuery; @@ -882,6 +887,10 @@ describe('TransactionController', () => { updateSwapsTransactionMock.mockImplementation( (transactionMeta) => transactionMeta, ); + + getAccountAddressRelationshipMock.mockResolvedValue({ + count: 1, + }); }); describe('constructor', () => { @@ 
-1355,6 +1364,10 @@ describe('TransactionController', () => { it('adds unapproved transaction to state', async () => { const { controller } = setupController(); + getAccountAddressRelationshipMock.mockResolvedValueOnce({ + count: 0, + }); + const mockDeviceConfirmedOn = WalletDevice.OTHER; const mockOrigin = 'origin'; const mockSecurityAlertResponse = { @@ -1390,6 +1403,8 @@ describe('TransactionController', () => { }, ); + await flushPromises(); + const transactionMeta = controller.state.transactions[0]; expect(updateSwapsTransactionMock).toHaveBeenCalledTimes(1); @@ -1404,6 +1419,69 @@ describe('TransactionController', () => { expect(controller.state.transactions[0].sendFlowHistory).toStrictEqual( mockSendFlowHistory, ); + expect(controller.state.transactions[0].isFirstTimeInteraction).toBe( + true, + ); + }); + + it('does not check account address relationship if a transaction with the same from, to, and chainId exists', async () => { + const { controller } = setupController({ + options: { + state: { + transactions: [ + { + id: '1', + chainId: MOCK_NETWORK.chainId, + networkClientId: NETWORK_CLIENT_ID_MOCK, + status: TransactionStatus.confirmed as const, + time: 123456789, + txParams: { + from: ACCOUNT_MOCK, + to: ACCOUNT_MOCK, + }, + isFirstTimeInteraction: false, // Ensure this is set + }, + ], + }, + }, + }); + + // Add second transaction with the same from, to, and chainId + await controller.addTransaction( + { + from: ACCOUNT_MOCK, + to: ACCOUNT_MOCK, + }, + { + networkClientId: NETWORK_CLIENT_ID_MOCK, + }, + ); + + await flushPromises(); + + expect(controller.state.transactions[1].isFirstTimeInteraction).toBe( + false, + ); + }); + + it('does not update first time interaction properties if disabled', async () => { + const { controller } = setupController({ + options: { isFirstTimeInteractionEnabled: () => false }, + }); + + await controller.addTransaction( + { + from: ACCOUNT_MOCK, + to: ACCOUNT_MOCK, + }, + { + networkClientId: NETWORK_CLIENT_ID_MOCK, + }, + ); + + await flushPromises(); + + expect(getAccountAddressRelationshipMock).not.toHaveBeenCalled(); }); describe('networkClientId exists in the MultichainTrackingHelper', () => { @@ -1492,6 +1570,7 @@ describe('TransactionController', () => { dappSuggestedGasFees: undefined, deviceConfirmedOn: undefined, id: expect.any(String), + isFirstTimeInteraction: undefined, networkClientId: NETWORK_CLIENT_ID_MOCK, origin: undefined, securityAlertResponse: undefined, @@ -2263,7 +2342,7 @@ describe('TransactionController', () => { const mockActionId = 'mockActionId'; - const { result, transactionMeta } = await controller.addTransaction( + const { result } = await controller.addTransaction( { from: ACCOUNT_MOCK, to: ACCOUNT_MOCK, @@ -2284,10 +2363,14 @@ describe('TransactionController', () => { await finishedPromise; expect(rejectedEventListener).toHaveBeenCalledTimes(1); - expect(rejectedEventListener).toHaveBeenCalledWith({ - transactionMeta: { ...transactionMeta, status: 'rejected' }, - actionId: mockActionId, - }); + expect(rejectedEventListener).toHaveBeenCalledWith( + expect.objectContaining({ + transactionMeta: expect.objectContaining({ + status: 'rejected', + }), + actionId: mockActionId, + }), + ); }); }); diff --git a/packages/transaction-controller/src/TransactionController.ts b/packages/transaction-controller/src/TransactionController.ts index 04745a040a..8dbfafc876 100644 --- a/packages/transaction-controller/src/TransactionController.ts +++ b/packages/transaction-controller/src/TransactionController.ts @@ -44,12 +44,16 @@ 
import type { import { NonceTracker } from '@metamask/nonce-tracker'; import { errorCodes, rpcErrors, providerErrors } from '@metamask/rpc-errors'; import type { Hex } from '@metamask/utils'; -import { add0x } from '@metamask/utils'; +import { add0x, hexToNumber } from '@metamask/utils'; import { Mutex } from 'async-mutex'; import { EventEmitter } from 'events'; import { cloneDeep, mapValues, merge, pickBy, sortBy } from 'lodash'; import { v1 as random } from 'uuid'; +import { + getAccountAddressRelationship, + type GetAccountAddressRelationshipRequest, +} from './api/accounts-api'; import { DefaultGasFeeFlow } from './gas-flows/DefaultGasFeeFlow'; import { LineaGasFeeFlow } from './gas-flows/LineaGasFeeFlow'; import { OptimismLayer1GasFeeFlow } from './gas-flows/OptimismLayer1GasFeeFlow'; @@ -123,6 +127,7 @@ import { normalizeGasFeeValues, } from './utils/utils'; import { + validateParamTo, validateTransactionOrigin, validateTxParams, } from './utils/validation'; @@ -291,6 +296,7 @@ export type TransactionControllerOptions = { /** API keys to be used for Etherscan requests to prevent rate limiting. */ etherscanApiKeysByChainId?: Record; }; + isFirstTimeInteractionEnabled?: () => boolean; isSimulationEnabled?: () => boolean; messenger: TransactionControllerMessenger; pendingTransactions?: PendingTransactionOptions; @@ -626,6 +632,8 @@ export class TransactionController extends BaseController< #transactionHistoryLimit: number; + #isFirstTimeInteractionEnabled: () => boolean; + #isSimulationEnabled: () => boolean; #testGasFeeFlows: boolean; @@ -730,6 +738,7 @@ export class TransactionController extends BaseController< * @param options.getPermittedAccounts - Get accounts that a given origin has permissions for. * @param options.getSavedGasFees - Gets the saved gas fee config. * @param options.incomingTransactions - Configuration options for incoming transaction support. + * @param options.isFirstTimeInteractionEnabled - Whether first time interaction checks are enabled. * @param options.isSimulationEnabled - Whether new transactions will be automatically simulated. * @param options.messenger - The controller messenger. * @param options.pendingTransactions - Configuration options for pending transaction support. @@ -754,6 +763,7 @@ export class TransactionController extends BaseController< getPermittedAccounts, getSavedGasFees, incomingTransactions = {}, + isFirstTimeInteractionEnabled, isSimulationEnabled, messenger, pendingTransactions = {}, @@ -780,6 +790,8 @@ export class TransactionController extends BaseController< this.isSendFlowHistoryDisabled = disableSendFlowHistory ?? false; this.isHistoryDisabled = disableHistory ?? false; this.isSwapsDisabled = disableSwaps ?? false; + this.#isFirstTimeInteractionEnabled = + isFirstTimeInteractionEnabled ?? (() => true); this.#isSimulationEnabled = isSimulationEnabled ?? (() => true); this.getSavedGasFees = getSavedGasFees ?? 
((_chainId) => undefined); this.getCurrentAccountEIP1559Compatibility = @@ -1021,15 +1033,16 @@ export class TransactionController extends BaseController< dappSuggestedGasFees, deviceConfirmedOn, id: random(), + isFirstTimeInteraction: undefined, + networkClientId, origin, securityAlertResponse, status: TransactionStatus.unapproved as const, time: Date.now(), txParams, + type: transactionType, userEditedGasLimit: false, verifiedOnBlockchain: false, - type: transactionType, - networkClientId, }; await this.#trace( @@ -1080,8 +1093,16 @@ export class TransactionController extends BaseController< log('Error while updating simulation data', error); throw error; }); + + this.#updateFirstTimeInteraction(addedTransactionMeta, { + traceContext, + }).catch((error) => { + log('Error while updating first interaction properties', error); + }); } else { - log('Skipping simulation as approval not required'); + log( + 'Skipping simulation & first interaction update as approval not required', + ); } this.messagingSystem.publish( @@ -3536,6 +3557,87 @@ export class TransactionController extends BaseController< return transactionMeta; } + async #updateFirstTimeInteraction( + transactionMeta: TransactionMeta, + { + traceContext, + }: { + traceContext?: TraceContext; + } = {}, + ) { + if (!this.#isFirstTimeInteractionEnabled()) { + return; + } + + const { + chainId, + id: transactionId, + txParams: { to, from }, + } = transactionMeta; + + const request: GetAccountAddressRelationshipRequest = { + chainId: hexToNumber(chainId), + to: to as string, + from, + }; + + validateParamTo(to); + + const existingTransaction = this.state.transactions.find( + (tx) => + tx.chainId === chainId && + tx.txParams.from === from && + tx.txParams.to === to && + tx.id !== transactionId, + ); + + // Check if there is an existing transaction with the same from, to, and chainId + // else we continue to check the account address relationship from API + if (existingTransaction) { + return; + } + + try { + const { count } = await this.#trace( + { name: 'Account Address Relationship', parentContext: traceContext }, + () => getAccountAddressRelationship(request), + ); + + const isFirstTimeInteraction = + count === undefined ? 
undefined : count === 0; + + const finalTransactionMeta = this.getTransaction(transactionId); + + /* istanbul ignore if */ + if (!finalTransactionMeta) { + log( + 'Cannot update first time interaction as transaction not found', + transactionId, + ); + return; + } + + this.#updateTransactionInternal( + { + transactionId, + note: 'TransactionController#updateFirstInteraction - Update first time interaction', + }, + (txMeta) => { + txMeta.isFirstTimeInteraction = isFirstTimeInteraction; + }, + ); + + log('Updated first time interaction', transactionId, { + isFirstTimeInteraction, + }); + } catch (error) { + log( + 'Error fetching account address relationship, skipping first time interaction update', + error, + ); + } + } + async #updateSimulationData( transactionMeta: TransactionMeta, { diff --git a/packages/transaction-controller/src/TransactionControllerIntegration.test.ts b/packages/transaction-controller/src/TransactionControllerIntegration.test.ts index c181a06bf1..3c316f56c6 100644 --- a/packages/transaction-controller/src/TransactionControllerIntegration.test.ts +++ b/packages/transaction-controller/src/TransactionControllerIntegration.test.ts @@ -13,8 +13,6 @@ import { InfuraNetworkType, NetworkType, } from '@metamask/controller-utils'; -import type { InternalAccount } from '@metamask/keyring-api'; -import { EthAccountType, EthMethod } from '@metamask/keyring-api'; import { NetworkController, NetworkClientType, @@ -61,7 +59,7 @@ import type { TransactionControllerOptions, } from './TransactionController'; import { TransactionController } from './TransactionController'; -import type { TransactionMeta } from './types'; +import type { InternalAccount, TransactionMeta } from './types'; import { TransactionStatus, TransactionType } from './types'; import { getEtherscanApiHost } from './utils/etherscan'; import * as etherscanUtils from './utils/etherscan'; @@ -104,22 +102,15 @@ const createMockInternalAccount = ({ id, address, options: {}, - methods: [ - EthMethod.PersonalSign, - EthMethod.Sign, - EthMethod.SignTransaction, - EthMethod.SignTypedDataV1, - EthMethod.SignTypedDataV3, - EthMethod.SignTypedDataV4, - ], - type: EthAccountType.Eoa, + methods: [], + type: 'eip155:eoa', metadata: { name, keyring: { type: 'HD Key Tree' }, importTime, lastSelected, }, - } as InternalAccount; + }; }; const ACCOUNT_MOCK = '0x6bf137f335ea1b8f193b8f6ea92561a60d23a207'; diff --git a/packages/transaction-controller/src/api/accounts-api.test.ts b/packages/transaction-controller/src/api/accounts-api.test.ts new file mode 100644 index 0000000000..72ac59bae8 --- /dev/null +++ b/packages/transaction-controller/src/api/accounts-api.test.ts @@ -0,0 +1,95 @@ +import { FirstTimeInteractionError } from '../errors'; +import { getAccountAddressRelationship } from './accounts-api'; +import type { GetAccountAddressRelationshipRequest } from './accounts-api'; + +describe('Accounts API', () => { + let fetchMock: jest.MockedFunction; + /** + * Mock a JSON response from fetch. + * @param jsonResponse - The response body to return. + * @param status - The status code to return. 
+ */ + function mockFetchResponse(jsonResponse: unknown, status = 200) { + fetchMock.mockResolvedValueOnce({ + json: jest.fn().mockResolvedValue(jsonResponse), + status, + } as unknown as Response); + } + + beforeEach(() => { + fetchMock = jest.spyOn(global, 'fetch') as jest.MockedFunction< + typeof fetch + >; + }); + + describe('getAccountAddressRelationship', () => { + const CHAIN_ID_SUPPORTED = 1; + const CHAIN_ID_UNSUPPORTED = 999; + const FROM_ADDRESS = '0xSender'; + const TO_ADDRESS = '0xRecipient'; + + const REQUEST_MOCK: GetAccountAddressRelationshipRequest = { + chainId: CHAIN_ID_SUPPORTED, + from: FROM_ADDRESS, + to: TO_ADDRESS, + }; + + const EXISTING_RELATIONSHIP_RESPONSE_MOCK = { + count: 1, + }; + + describe('returns API response', () => { + it('for 204 responses', async () => { + mockFetchResponse({}, 204); + + const result = await getAccountAddressRelationship(REQUEST_MOCK); + + expect(result).toStrictEqual({ + count: 0, + }); + }); + + it('when there is no existing relationship', async () => { + mockFetchResponse({ count: 0 }); + + const result = await getAccountAddressRelationship(REQUEST_MOCK); + + expect(result).toStrictEqual({ + count: 0, + }); + }); + }); + + it('returns correct response for existing relationship', async () => { + mockFetchResponse(EXISTING_RELATIONSHIP_RESPONSE_MOCK); + + const result = await getAccountAddressRelationship(REQUEST_MOCK); + + expect(result).toStrictEqual(EXISTING_RELATIONSHIP_RESPONSE_MOCK); + }); + + describe('throws FirstTimeInteractionError', () => { + it('for unsupported chains', async () => { + const request = { + chainId: CHAIN_ID_UNSUPPORTED, + from: FROM_ADDRESS, + to: TO_ADDRESS, + }; + + await expect(getAccountAddressRelationship(request)).rejects.toThrow( + FirstTimeInteractionError, + ); + }); + + it('on error response', async () => { + mockFetchResponse({ + error: { code: 'error_code', message: 'Some error' }, + }); + + await expect( + getAccountAddressRelationship(REQUEST_MOCK), + ).rejects.toThrow(FirstTimeInteractionError); + }); + }); + }); +}); diff --git a/packages/transaction-controller/src/api/accounts-api.ts b/packages/transaction-controller/src/api/accounts-api.ts new file mode 100644 index 0000000000..8364aaddda --- /dev/null +++ b/packages/transaction-controller/src/api/accounts-api.ts @@ -0,0 +1,100 @@ +import { createModuleLogger } from '@metamask/utils'; + +import { FirstTimeInteractionError } from '../errors'; +import { projectLogger } from '../logger'; + +const SUPPORTED_CHAIN_IDS_FOR_RELATIONSHIP_API = [ + 1, // Ethereum Mainnet + 10, // Optimism + 56, // BSC + 137, // Polygon + 8453, // Base + 42161, // Arbitrum + 59144, // Linea + 534352, // Scroll +]; + +export type AccountAddressRelationshipResponse = { + chainId?: number; + count?: number; + data?: { + hash: string; + timestamp: string; + chainId: number; + blockNumber: string; + blockHash: string; + gas: number; + gasUsed: number; + gasPrice: string; + effectiveGasPrice: number; + nonce: number; + cumulativeGasUsed: number; + methodId: string; + value: string; + to: string; + from: string; + }; + txHash?: string; +}; + +export type AccountAddressRelationshipResult = + AccountAddressRelationshipResponse & { + error?: { + code: string; + message: string; + }; + }; + +export type GetAccountAddressRelationshipRequest = { + /** Chain ID of account relationship to check. */ + chainId: number; + + /** Recipient of the transaction. */ + to: string; + + /** Sender of the transaction. 
*/ + from: string; +}; + +const BASE_URL = `https://accounts.api.cx.metamask.io`; + +const log = createModuleLogger(projectLogger, 'accounts-api'); + +/** + * Fetch account address relationship from the accounts API. + * @param request - The request object. + * @returns The raw response object from the API. + */ +export async function getAccountAddressRelationship( + request: GetAccountAddressRelationshipRequest, +): Promise { + const { chainId, from, to } = request; + + if (!SUPPORTED_CHAIN_IDS_FOR_RELATIONSHIP_API.includes(chainId)) { + log('Unsupported chain ID for account relationship API', chainId); + throw new FirstTimeInteractionError('Unsupported chain ID'); + } + + const url = `${BASE_URL}/v1/networks/${chainId}/accounts/${from}/relationships/${to}`; + + log('Getting account address relationship', { request, url }); + + const response = await fetch(url); + + if (response.status === 204) { + // The accounts API returns a 204 status code when there are no transactions with empty body + // imitating a count of 0 + return { count: 0 }; + } + + const responseJson: AccountAddressRelationshipResult = await response.json(); + + log('Retrieved account address relationship', responseJson); + + if (responseJson.error) { + const { code, message } = responseJson.error; + throw new FirstTimeInteractionError(message, code); + } + + return responseJson; +} diff --git a/packages/transaction-controller/src/errors.ts b/packages/transaction-controller/src/errors.ts index 6695ffb0ec..53f79810aa 100644 --- a/packages/transaction-controller/src/errors.ts +++ b/packages/transaction-controller/src/errors.ts @@ -12,6 +12,16 @@ export class SimulationError extends Error { } } +export class FirstTimeInteractionError extends Error { + code?: string | number; + + constructor(message?: string, code?: string | number) { + super(message ?? 
'Error checking first time interaction'); + + this.code = code; + } +} + export class SimulationChainNotSupportedError extends SimulationError { constructor(chainId: Hex) { super( diff --git a/packages/transaction-controller/src/helpers/PendingTransactionTracker.test.ts b/packages/transaction-controller/src/helpers/PendingTransactionTracker.test.ts index b28b969392..a093ef5992 100644 --- a/packages/transaction-controller/src/helpers/PendingTransactionTracker.test.ts +++ b/packages/transaction-controller/src/helpers/PendingTransactionTracker.test.ts @@ -1,19 +1,19 @@ -/* eslint-disable jsdoc/require-jsdoc */ - import { query } from '@metamask/controller-utils'; +import type EthQuery from '@metamask/eth-query'; import type { BlockTracker } from '@metamask/network-controller'; import { freeze } from 'immer'; import type { TransactionMeta } from '../types'; import { TransactionStatus } from '../types'; import { PendingTransactionTracker } from './PendingTransactionTracker'; +import { TransactionPoller } from './TransactionPoller'; const ID_MOCK = 'testId'; const CHAIN_ID_MOCK = '0x1'; const NONCE_MOCK = '0x2'; const BLOCK_NUMBER_MOCK = '0x123'; -const ETH_QUERY_MOCK = {}; +const ETH_QUERY_MOCK = {} as unknown as EthQuery; const TRANSACTION_SUBMITTED_MOCK = { id: ID_MOCK, @@ -24,7 +24,7 @@ const TRANSACTION_SUBMITTED_MOCK = { txParams: { nonce: NONCE_MOCK, }, -}; +} as unknown as TransactionMeta; const RECEIPT_MOCK = { blockNumber: BLOCK_NUMBER_MOCK, @@ -38,6 +38,8 @@ const BLOCK_MOCK = { timestamp: 123456, }; +jest.mock('./TransactionPoller'); + jest.mock('@metamask/controller-utils', () => ({ query: jest.fn(), // TODO: Replace `any` with type @@ -45,25 +47,45 @@ jest.mock('@metamask/controller-utils', () => ({ safelyExecute: (fn: () => any) => fn(), })); +/** + * Creates a mock block tracker instance. + * @returns The mock block tracker instance. + */ function createBlockTrackerMock(): jest.Mocked { return { on: jest.fn(), removeListener: jest.fn(), - // TODO: Replace `any` with type - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any; + } as unknown as jest.Mocked; +} + +/** + * Creates a mock transaction poller instance. + * @returns The mock transaction poller instance. + */ +function createTransactionPollerMock(): jest.Mocked { + return { + start: jest.fn(), + stop: jest.fn(), + setPendingTransactions: jest.fn(), + } as unknown as jest.Mocked; } describe('PendingTransactionTracker', () => { const queryMock = jest.mocked(query); let blockTracker: jest.Mocked; - let failTransaction: jest.Mock; let pendingTransactionTracker: PendingTransactionTracker; - // TODO: Replace `any` with type - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let options: any; - - async function onLatestBlock( + let transactionPoller: jest.Mocked; + + let options: jest.Mocked< + ConstructorParameters[0] + >; + + /** + * Simulates a poll event. + * @param latestBlockNumber - The latest block number. + * @param transactionsOnCheck - The current transactions during the check. + */ + async function onPoll( latestBlockNumber?: string, transactionsOnCheck?: TransactionMeta[], ) { @@ -79,29 +101,27 @@ describe('PendingTransactionTracker', () => { ); } - // TODO: Either fix this lint violation or explain why it's necessary to ignore. 
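(Editorial aside: the first-time-interaction pieces added above — the `accounts-api` helper, `FirstTimeInteractionError`, and `TransactionController.#updateFirstTimeInteraction` — fit together roughly as sketched below. The relative import path is an assumption about where such a snippet would live; the flag derivation mirrors the controller code, which additionally skips the lookup when `isFirstTimeInteractionEnabled()` returns `false` or when an earlier transaction with the same `from`, `to`, and `chainId` already exists.)

```ts
import { hexToNumber, type Hex } from '@metamask/utils';

import {
  getAccountAddressRelationship,
  type GetAccountAddressRelationshipRequest,
} from './api/accounts-api';

/**
 * Sketch: derive the `isFirstTimeInteraction` flag for a transaction.
 * Returns `undefined` when the check cannot be performed.
 */
async function deriveIsFirstTimeInteraction(
  chainId: Hex,
  from: string,
  to: string,
): Promise<boolean | undefined> {
  const request: GetAccountAddressRelationshipRequest = {
    chainId: hexToNumber(chainId),
    from,
    to,
  };

  try {
    // 204 responses are normalized by the helper to `{ count: 0 }`.
    const { count } = await getAccountAddressRelationship(request);
    return count === undefined ? undefined : count === 0;
  } catch {
    // Unsupported chains and API errors surface as FirstTimeInteractionError;
    // the controller logs these and leaves the flag untouched.
    return undefined;
  }
}
```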
- // eslint-disable-next-line @typescript-eslint/await-thenable - await blockTracker.on.mock.calls[0][1](latestBlockNumber); + await transactionPoller.start.mock.calls[0][0](latestBlockNumber as string); } beforeEach(() => { blockTracker = createBlockTrackerMock(); - failTransaction = jest.fn(); + transactionPoller = createTransactionPollerMock(); + + jest.mocked(TransactionPoller).mockImplementation(() => transactionPoller); options = { - approveTransaction: jest.fn(), blockTracker, - failTransaction, - getChainId: () => CHAIN_ID_MOCK, - getEthQuery: () => ETH_QUERY_MOCK, + getChainId: jest.fn(() => CHAIN_ID_MOCK), + getEthQuery: jest.fn(() => ETH_QUERY_MOCK), getTransactions: jest.fn(), - getGlobalLock: () => Promise.resolve(jest.fn()), + getGlobalLock: jest.fn(() => Promise.resolve(jest.fn())), publishTransaction: jest.fn(), }; }); describe('on state change', () => { - it('adds block tracker listener if pending transactions', () => { + it('adds listener if pending transactions', () => { pendingTransactionTracker = new PendingTransactionTracker(options); options.getTransactions.mockReturnValue( @@ -110,14 +130,13 @@ describe('PendingTransactionTracker', () => { pendingTransactionTracker.startIfPendingTransactions(); - expect(blockTracker.on).toHaveBeenCalledTimes(1); - expect(blockTracker.on).toHaveBeenCalledWith( - 'latest', + expect(transactionPoller.start).toHaveBeenCalledTimes(1); + expect(transactionPoller.start).toHaveBeenCalledWith( expect.any(Function), ); }); - it('does nothing if block tracker listener already added', () => { + it('does nothing if listener already added', () => { pendingTransactionTracker = new PendingTransactionTracker(options); options.getTransactions.mockReturnValue( @@ -127,11 +146,11 @@ describe('PendingTransactionTracker', () => { pendingTransactionTracker.startIfPendingTransactions(); pendingTransactionTracker.startIfPendingTransactions(); - expect(blockTracker.on).toHaveBeenCalledTimes(1); - expect(blockTracker.removeListener).toHaveBeenCalledTimes(0); + expect(transactionPoller.start).toHaveBeenCalledTimes(1); + expect(transactionPoller.stop).toHaveBeenCalledTimes(0); }); - it('removes block tracker listener if no pending transactions and running', () => { + it('removes listener if no pending transactions and running', () => { pendingTransactionTracker = new PendingTransactionTracker(options); options.getTransactions.mockReturnValue( @@ -140,20 +159,16 @@ describe('PendingTransactionTracker', () => { pendingTransactionTracker.startIfPendingTransactions(); - expect(blockTracker.removeListener).toHaveBeenCalledTimes(0); + expect(transactionPoller.stop).toHaveBeenCalledTimes(0); options.getTransactions.mockReturnValue([]); pendingTransactionTracker.startIfPendingTransactions(); - expect(blockTracker.removeListener).toHaveBeenCalledTimes(1); - expect(blockTracker.removeListener).toHaveBeenCalledWith( - 'latest', - expect.any(Function), - ); + expect(transactionPoller.stop).toHaveBeenCalledTimes(1); }); - it('does nothing if block tracker listener already removed', () => { + it('does nothing if listener already removed', () => { pendingTransactionTracker = new PendingTransactionTracker(options); options.getTransactions.mockReturnValue( @@ -168,11 +183,11 @@ describe('PendingTransactionTracker', () => { pendingTransactionTracker.startIfPendingTransactions(); - expect(blockTracker.removeListener).toHaveBeenCalledTimes(1); + expect(transactionPoller.stop).toHaveBeenCalledTimes(1); pendingTransactionTracker.startIfPendingTransactions(); - 
expect(blockTracker.removeListener).toHaveBeenCalledTimes(1); + expect(transactionPoller.stop).toHaveBeenCalledTimes(1); }); }); @@ -201,7 +216,7 @@ describe('PendingTransactionTracker', () => { listener, ); - await onLatestBlock(undefined, [ + await onPoll(undefined, [ { ...TRANSACTION_SUBMITTED_MOCK, status: TransactionStatus.dropped, @@ -248,7 +263,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(0); }); @@ -278,7 +293,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce({ ...RECEIPT_MOCK, status: null }); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(0); }); @@ -308,7 +323,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce({ ...RECEIPT_MOCK, status: '0x3' }); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(0); }); @@ -333,7 +348,7 @@ describe('PendingTransactionTracker', () => { listener, ); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(1); expect(listener).toHaveBeenCalledWith( @@ -366,7 +381,7 @@ describe('PendingTransactionTracker', () => { listener, ); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(0); }); @@ -387,7 +402,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce({ ...RECEIPT_MOCK, status: '0x0' }); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(1); expect(listener).toHaveBeenCalledWith( @@ -405,7 +420,7 @@ describe('PendingTransactionTracker', () => { ...TRANSACTION_SUBMITTED_MOCK, id: `${ID_MOCK}2`, status: TransactionStatus.confirmed, - }; + } as unknown as TransactionMeta; const submittedTransactionMetaMock = { ...TRANSACTION_SUBMITTED_MOCK, @@ -425,7 +440,7 @@ describe('PendingTransactionTracker', () => { listener, ); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(1); expect(listener).toHaveBeenCalledWith(submittedTransactionMetaMock); @@ -451,7 +466,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x3'); - await onLatestBlock(); + await onPoll(); } expect(listener).toHaveBeenCalledTimes(1); @@ -466,7 +481,7 @@ describe('PendingTransactionTracker', () => { id: `${ID_MOCK}2`, chainId: '0x2', status: TransactionStatus.confirmed, - }; + } as unknown as TransactionMeta; const submittedTransactionMetaMock = { ...TRANSACTION_SUBMITTED_MOCK, @@ -485,7 +500,7 @@ describe('PendingTransactionTracker', () => { listener, ); - await onLatestBlock(); + await onPoll(); expect(listener).not.toHaveBeenCalled(); }); @@ -512,7 +527,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(RECEIPT_MOCK); queryMock.mockResolvedValueOnce(BLOCK_MOCK); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(1); expect(listener).toHaveBeenCalledWith( @@ -552,7 +567,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(RECEIPT_MOCK); queryMock.mockResolvedValueOnce(BLOCK_MOCK); - await onLatestBlock(); + await onPoll(); expect(listener).toHaveBeenCalledTimes(2); expect(listener).toHaveBeenCalledWith( @@ -591,7 +606,7 @@ describe('PendingTransactionTracker', () => { 
queryMock.mockRejectedValueOnce(new Error('TestError')); queryMock.mockResolvedValueOnce(BLOCK_MOCK); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); getTransactions.mockReturnValue( freeze( [ @@ -625,9 +640,8 @@ describe('PendingTransactionTracker', () => { it('if no pending transactions', async () => { pendingTransactionTracker = new PendingTransactionTracker(options); - await onLatestBlock(undefined, []); + await onPoll(undefined, []); - expect(options.approveTransaction).toHaveBeenCalledTimes(0); expect(options.publishTransaction).toHaveBeenCalledTimes(0); }); }); @@ -650,7 +664,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); expect(listener).toHaveBeenCalledTimes(1); expect(listener).toHaveBeenCalledWith( @@ -682,7 +696,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); getTransactions.mockReturnValue( freeze( [ @@ -694,7 +708,7 @@ describe('PendingTransactionTracker', () => { true, ), ); - await onLatestBlock('0x124'); + await onPoll('0x124'); expect(listener).toHaveBeenCalledTimes(2); expect(listener).toHaveBeenCalledWith( @@ -731,7 +745,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); getTransactions.mockReturnValue( freeze( [ @@ -743,7 +757,7 @@ describe('PendingTransactionTracker', () => { true, ), ); - await onLatestBlock('0x124'); + await onPoll('0x124'); expect(listener).toHaveBeenCalledTimes(1); expect(listener).toHaveBeenCalledWith( @@ -780,7 +794,7 @@ describe('PendingTransactionTracker', () => { new Error('TestError'), ); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); getTransactions.mockReturnValue( freeze( [ @@ -792,7 +806,7 @@ describe('PendingTransactionTracker', () => { true, ), ); - await onLatestBlock('0x124'); + await onPoll('0x124'); expect(listener).toHaveBeenCalledTimes(2); expect(listener).toHaveBeenCalledWith( @@ -833,7 +847,7 @@ describe('PendingTransactionTracker', () => { new Error('test gas price too low to replace test'), ); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); getTransactions.mockReturnValue( freeze( [ @@ -845,7 +859,7 @@ describe('PendingTransactionTracker', () => { true, ), ); - await onLatestBlock('0x124'); + await onPoll('0x124'); expect(listener).toHaveBeenCalledTimes(1); expect(listener).not.toHaveBeenCalledWith( @@ -870,7 +884,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); getTransactions.mockReturnValue( freeze( [ @@ -882,7 +896,7 @@ describe('PendingTransactionTracker', () => { true, ), ); - await onLatestBlock('0x124'); + await onPoll('0x124'); expect(options.publishTransaction).toHaveBeenCalledTimes(1); expect(options.publishTransaction).toHaveBeenCalledWith( @@ -908,7 +922,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); 
expect(options.publishTransaction).toHaveBeenCalledTimes(0); getTransactions.mockReturnValue( freeze( @@ -922,7 +936,7 @@ describe('PendingTransactionTracker', () => { ), ); - await onLatestBlock('0x124'); + await onPoll('0x124'); expect(options.publishTransaction).toHaveBeenCalledTimes(1); getTransactions.mockReturnValue( freeze( @@ -937,7 +951,7 @@ describe('PendingTransactionTracker', () => { ), ); - await onLatestBlock('0x125'); + await onPoll('0x125'); expect(options.publishTransaction).toHaveBeenCalledTimes(2); getTransactions.mockReturnValue( freeze( @@ -952,10 +966,10 @@ describe('PendingTransactionTracker', () => { ), ); - await onLatestBlock('0x126'); + await onPoll('0x126'); expect(options.publishTransaction).toHaveBeenCalledTimes(2); - await onLatestBlock('0x127'); + await onPoll('0x127'); expect(options.publishTransaction).toHaveBeenCalledTimes(3); getTransactions.mockReturnValue( freeze( @@ -970,10 +984,10 @@ describe('PendingTransactionTracker', () => { ), ); - await onLatestBlock('0x12A'); + await onPoll('0x12A'); expect(options.publishTransaction).toHaveBeenCalledTimes(3); - await onLatestBlock('0x12B'); + await onPoll('0x12B'); expect(options.publishTransaction).toHaveBeenCalledTimes(4); }); @@ -992,7 +1006,7 @@ describe('PendingTransactionTracker', () => { queryMock.mockResolvedValueOnce(undefined); queryMock.mockResolvedValueOnce('0x1'); - await onLatestBlock(BLOCK_NUMBER_MOCK); + await onPoll(BLOCK_NUMBER_MOCK); getTransactions.mockReturnValue( freeze( @@ -1006,7 +1020,7 @@ describe('PendingTransactionTracker', () => { ), ); - await onLatestBlock('0x124'); + await onPoll('0x124'); expect(options.publishTransaction).toHaveBeenCalledTimes(0); }); diff --git a/packages/transaction-controller/src/helpers/PendingTransactionTracker.ts b/packages/transaction-controller/src/helpers/PendingTransactionTracker.ts index 461903e3cd..c159e66f01 100644 --- a/packages/transaction-controller/src/helpers/PendingTransactionTracker.ts +++ b/packages/transaction-controller/src/helpers/PendingTransactionTracker.ts @@ -10,6 +10,7 @@ import { cloneDeep, merge } from 'lodash'; import { createModuleLogger, projectLogger } from '../logger'; import type { TransactionMeta, TransactionReceipt } from '../types'; import { TransactionStatus, TransactionType } from '../types'; +import { TransactionPoller } from './TransactionPoller'; /** * We wait this many blocks before emitting a 'transaction-dropped' event @@ -63,8 +64,6 @@ export interface PendingTransactionTrackerEventEmitter extends EventEmitter { export class PendingTransactionTracker { hub: PendingTransactionTrackerEventEmitter; - #blockTracker: BlockTracker; - #droppedBlockCountByHash: Map; #getChainId: () => string; @@ -90,6 +89,8 @@ export class PendingTransactionTracker { #running: boolean; + #transactionPoller: TransactionPoller; + #beforeCheckPendingTransaction: (transactionMeta: TransactionMeta) => boolean; #beforePublish: (transactionMeta: TransactionMeta) => boolean; @@ -123,7 +124,6 @@ export class PendingTransactionTracker { }) { this.hub = new EventEmitter() as PendingTransactionTrackerEventEmitter; - this.#blockTracker = blockTracker; this.#droppedBlockCountByHash = new Map(); this.#getChainId = getChainId; this.#getEthQuery = getEthQuery; @@ -134,6 +134,7 @@ export class PendingTransactionTracker { this.#getGlobalLock = getGlobalLock; this.#publishTransaction = publishTransaction; this.#running = false; + this.#transactionPoller = new TransactionPoller(blockTracker); this.#beforePublish = hooks?.beforePublish ?? 
(() => true); this.#beforeCheckPendingTransaction = hooks?.beforeCheckPendingTransaction ?? (() => true); @@ -143,7 +144,7 @@ export class PendingTransactionTracker { const pendingTransactions = this.#getPendingTransactions(); if (pendingTransactions.length) { - this.#start(); + this.#start(pendingTransactions); } else { this.stop(); } @@ -167,12 +168,14 @@ export class PendingTransactionTracker { } } - #start() { + #start(pendingTransactions: TransactionMeta[]) { + this.#transactionPoller.setPendingTransactions(pendingTransactions); + if (this.#running) { return; } - this.#blockTracker.on('latest', this.#listener); + this.#transactionPoller.start(this.#listener); this.#running = true; this.#log('Started polling'); @@ -183,7 +186,7 @@ export class PendingTransactionTracker { return; } - this.#blockTracker.removeListener('latest', this.#listener); + this.#transactionPoller.stop(); this.#running = false; this.#log('Stopped polling'); diff --git a/packages/transaction-controller/src/helpers/TransactionPoller.test.ts b/packages/transaction-controller/src/helpers/TransactionPoller.test.ts new file mode 100644 index 0000000000..c3dfd28f4c --- /dev/null +++ b/packages/transaction-controller/src/helpers/TransactionPoller.test.ts @@ -0,0 +1,264 @@ +import type { BlockTracker } from '@metamask/network-controller'; + +import { flushPromises } from '../../../../tests/helpers'; +import type { TransactionMeta } from '../types'; +import { ACCELERATED_COUNT_MAX, TransactionPoller } from './TransactionPoller'; + +jest.useFakeTimers(); + +const BLOCK_NUMBER_MOCK = '0x123'; + +const BLOCK_TRACKER_MOCK = { + getLatestBlock: jest.fn(), + on: jest.fn(), + removeListener: jest.fn(), +} as unknown as jest.Mocked; + +/** + * Creates a mock transaction metadata object. + * @param id - The transaction ID. + * @returns The mock transaction metadata object. 
+ */ +function createTransactionMetaMock(id: string) { + return { id } as TransactionMeta; +} + +describe('TransactionPoller', () => { + beforeEach(() => { + jest.resetAllMocks(); + jest.clearAllTimers(); + }); + + describe('Accelerated Polling', () => { + it('invokes listener after timeout', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + const listener = jest.fn(); + poller.start(listener); + + expect(jest.getTimerCount()).toBe(1); + + jest.runOnlyPendingTimers(); + await flushPromises(); + + expect(listener).toHaveBeenCalledTimes(1); + }); + + it('stops creating timeouts after max reached', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + const listener = jest.fn(); + poller.start(listener); + + for (let i = 0; i < ACCELERATED_COUNT_MAX * 3; i++) { + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + expect(listener).toHaveBeenCalledTimes(ACCELERATED_COUNT_MAX); + }); + + it('invokes listener with latest block number from block tracker', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + BLOCK_TRACKER_MOCK.getLatestBlock.mockResolvedValue(BLOCK_NUMBER_MOCK); + + const listener = jest.fn(); + poller.start(listener); + + jest.runOnlyPendingTimers(); + await flushPromises(); + + expect(listener).toHaveBeenCalledWith(BLOCK_NUMBER_MOCK); + }); + + it('does not create timeout if stopped while listener being invoked', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + const listener = jest.fn(); + listener.mockImplementation(() => poller.stop()); + + poller.start(listener); + + jest.runOnlyPendingTimers(); + await flushPromises(); + + expect(jest.getTimerCount()).toBe(0); + }); + }); + + describe('Block Tracker Polling', () => { + it('invokes listener on block tracker update after accelerated limit reached', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + const listener = jest.fn(); + poller.start(listener); + + for (let i = 0; i < ACCELERATED_COUNT_MAX; i++) { + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + BLOCK_TRACKER_MOCK.on.mock.calls[0][1](); + await flushPromises(); + + BLOCK_TRACKER_MOCK.on.mock.calls[0][1](); + await flushPromises(); + + expect(listener).toHaveBeenCalledTimes(ACCELERATED_COUNT_MAX + 2); + }); + + it('invokes listener with latest block number from event', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + const listener = jest.fn(); + poller.start(listener); + + for (let i = 0; i < ACCELERATED_COUNT_MAX; i++) { + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + BLOCK_TRACKER_MOCK.on.mock.calls[0][1](BLOCK_NUMBER_MOCK); + await flushPromises(); + + expect(listener).toHaveBeenCalledWith(BLOCK_NUMBER_MOCK); + }); + }); + + describe('start', () => { + it('does nothing if already started', () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + poller.start(jest.fn()); + poller.start(jest.fn()); + + expect(jest.getTimerCount()).toBe(1); + }); + }); + + describe('stop', () => { + it('removes timeout', () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + const listener = jest.fn(); + poller.start(listener); + poller.stop(); + + expect(jest.getTimerCount()).toBe(0); + expect(listener).not.toHaveBeenCalled(); + }); + + it('removes block tracker listener', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + const listener = jest.fn(); + poller.start(listener); + + for (let i = 0; i < ACCELERATED_COUNT_MAX; i++) 
{ + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + poller.stop(); + + expect(BLOCK_TRACKER_MOCK.removeListener).toHaveBeenCalledTimes(1); + expect(listener).toHaveBeenCalledTimes(ACCELERATED_COUNT_MAX); + }); + + it('does nothing if not started', async () => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + poller.stop(); + + expect(jest.getTimerCount()).toBe(0); + expect(BLOCK_TRACKER_MOCK.removeListener).not.toHaveBeenCalled(); + }); + }); + + describe('setPendingTransactions', () => { + it.each([ + [ + 'added', + [ + createTransactionMetaMock('1'), + createTransactionMetaMock('2'), + createTransactionMetaMock('3'), + ], + ], + ['removed', [createTransactionMetaMock('1')]], + ])( + 'resets accelerated count if transaction IDs %s', + async (_title, newPendingTransactions) => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + poller.setPendingTransactions([ + createTransactionMetaMock('1'), + createTransactionMetaMock('2'), + ]); + + const listener = jest.fn(); + poller.start(listener); + + for (let i = 0; i < 3; i++) { + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + poller.setPendingTransactions(newPendingTransactions); + + for (let i = 0; i < ACCELERATED_COUNT_MAX; i++) { + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + expect(listener).toHaveBeenCalledTimes(ACCELERATED_COUNT_MAX + 3); + }, + ); + + it.each([ + [ + 'added', + [ + createTransactionMetaMock('1'), + createTransactionMetaMock('2'), + createTransactionMetaMock('3'), + ], + ], + ['removed', [createTransactionMetaMock('1')]], + ])( + 'resets to accelerated polling if transaction IDs %s', + async (_title, newPendingTransactions) => { + const poller = new TransactionPoller(BLOCK_TRACKER_MOCK); + + poller.setPendingTransactions([ + createTransactionMetaMock('1'), + createTransactionMetaMock('2'), + ]); + + const listener = jest.fn(); + poller.start(listener); + + for (let i = 0; i < ACCELERATED_COUNT_MAX; i++) { + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + BLOCK_TRACKER_MOCK.on.mock.calls[0][1](BLOCK_NUMBER_MOCK); + await flushPromises(); + + BLOCK_TRACKER_MOCK.on.mock.calls[0][1](BLOCK_NUMBER_MOCK); + await flushPromises(); + + poller.setPendingTransactions(newPendingTransactions); + + for (let i = 0; i < ACCELERATED_COUNT_MAX; i++) { + jest.runOnlyPendingTimers(); + await flushPromises(); + } + + expect(listener).toHaveBeenCalledTimes(ACCELERATED_COUNT_MAX * 2 + 2); + }, + ); + }); +}); diff --git a/packages/transaction-controller/src/helpers/TransactionPoller.ts b/packages/transaction-controller/src/helpers/TransactionPoller.ts new file mode 100644 index 0000000000..cc0b4647b2 --- /dev/null +++ b/packages/transaction-controller/src/helpers/TransactionPoller.ts @@ -0,0 +1,168 @@ +import type { BlockTracker } from '@metamask/network-controller'; +import { createModuleLogger } from '@metamask/utils'; +import { isEqual } from 'lodash'; + +import { projectLogger } from '../logger'; +import type { TransactionMeta } from '../types'; + +export const ACCELERATED_COUNT_MAX = 10; +export const ACCELERATED_INTERVAL = 1000 * 3; // 3 Seconds + +const log = createModuleLogger(projectLogger, 'transaction-poller'); + +/** + * Helper class to orchestrate when to poll pending transactions. + * Initially starts polling via a timeout chain every 3 seconds up to 10 times. + * Following that, it will poll on every new block via the block tracker.
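(Editorial aside: a usage sketch of the class documented above. The import paths assume a caller sitting next to this helper, as `PendingTransactionTracker` does, and the block tracker stub is only there to keep the snippet self-contained.)

```ts
import type { BlockTracker } from '@metamask/network-controller';

import type { TransactionMeta } from '../types';
import { TransactionPoller } from './TransactionPoller';

// Minimal stand-ins; a real caller would use the network controller's block
// tracker and its own transaction metadata.
const blockTracker = {
  getLatestBlock: async () => '0x1',
  on: () => undefined,
  removeListener: () => undefined,
} as unknown as BlockTracker;

const pendingTransactions = [{ id: '1' }] as TransactionMeta[];

const poller = new TransactionPoller(blockTracker);

// Changes to the set of pending transaction IDs reset the accelerated count.
poller.setPendingTransactions(pendingTransactions);

// Ten checks three seconds apart, then one check per new block.
poller.start(async (latestBlockNumber) => {
  console.log('Checking pending transactions at block', latestBlockNumber);
});

// Once nothing is pending any more:
poller.stop();
```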
+ */ +export class TransactionPoller { + #acceleratedCount = 0; + + #blockTracker: BlockTracker; + + #blockTrackerListener?: (latestBlockNumber: string) => void; + + #listener?: (latestBlockNumber: string) => Promise; + + #pendingTransactions?: TransactionMeta[]; + + #running = false; + + #timeout?: NodeJS.Timeout; + + constructor(blockTracker: BlockTracker) { + this.#blockTracker = blockTracker; + } + + /** + * Start the poller with a listener that will be called on every interval. + * @param listener - The listener to call on every interval. + */ + start(listener: (latestBlockNumber: string) => Promise) { + if (this.#running) { + return; + } + + this.#listener = listener; + this.#running = true; + + this.#queue(); + + log('Started'); + } + + /** + * Stop the poller. + * Remove all timeouts and block tracker listeners. + */ + stop() { + if (!this.#running) { + return; + } + + this.#running = false; + this.#listener = undefined; + this.#acceleratedCount = 0; + this.#pendingTransactions = undefined; + + this.#stopTimeout(); + this.#stopBlockTracker(); + + log('Stopped'); + } + + /** + * Notify the poller of the pending transactions being monitored. + * This will reset to the accelerated polling and reset the count + * when new transactions are added or removed. + * @param pendingTransactions - The pending transactions to poll. + */ + setPendingTransactions(pendingTransactions: TransactionMeta[]) { + const currentPendingTransactionIds = (this.#pendingTransactions ?? []).map( + (tx) => tx.id, + ); + + this.#pendingTransactions = pendingTransactions; + + const newPendingTransactionIds = pendingTransactions.map((tx) => tx.id); + + const hasUpdatedIds = !isEqual( + currentPendingTransactionIds, + newPendingTransactionIds, + ); + + if (!this.#running || !hasUpdatedIds) { + return; + } + + log('Detected new pending transactions', newPendingTransactionIds); + + this.#acceleratedCount = 0; + + if (this.#blockTrackerListener) { + this.#stopBlockTracker(); + this.#queue(); + } + } + + #queue() { + if (!this.#running) { + return; + } + + if (this.#acceleratedCount >= ACCELERATED_COUNT_MAX) { + // eslint-disable-next-line @typescript-eslint/no-misused-promises + this.#blockTrackerListener = (latestBlockNumber) => + this.#interval(false, latestBlockNumber); + + this.#blockTracker.on('latest', this.#blockTrackerListener); + + log('Added block tracker listener'); + + return; + } + + this.#stopTimeout(); + + // eslint-disable-next-line @typescript-eslint/no-misused-promises + this.#timeout = setTimeout(async () => { + await this.#interval(true); + this.#queue(); + }, ACCELERATED_INTERVAL); + } + + async #interval(isAccelerated: boolean, latestBlockNumber?: string) { + if (isAccelerated) { + log('Accelerated interval', this.#acceleratedCount + 1); + } else { + log('Block tracker interval', latestBlockNumber); + } + + const latestBlockNumberFinal = + latestBlockNumber ?? 
(await this.#blockTracker.getLatestBlock()); + + await this.#listener?.(latestBlockNumberFinal); + + if (isAccelerated && this.#running) { + this.#acceleratedCount += 1; + } + } + + #stopTimeout() { + if (!this.#timeout) { + return; + } + + clearTimeout(this.#timeout); + this.#timeout = undefined; + } + + #stopBlockTracker() { + if (!this.#blockTrackerListener) { + return; + } + + this.#blockTracker.removeListener('latest', this.#blockTrackerListener); + this.#blockTrackerListener = undefined; + } +} diff --git a/packages/transaction-controller/src/types.ts b/packages/transaction-controller/src/types.ts index dda87ea640..bb3b377337 100644 --- a/packages/transaction-controller/src/types.ts +++ b/packages/transaction-controller/src/types.ts @@ -1,4 +1,5 @@ import type { AccessList } from '@ethereumjs/tx'; +import type { AccountsController } from '@metamask/accounts-controller'; import type EthQuery from '@metamask/eth-query'; import type { GasFeeState } from '@metamask/gas-fee-controller'; import type { NetworkClientId, Provider } from '@metamask/network-controller'; @@ -167,6 +168,11 @@ type TransactionMetaBase = { */ firstRetryBlockNumber?: string; + /** + * Whether the transaction is the first time interaction. + */ + isFirstTimeInteraction?: boolean; + /** Alternate EIP-1559 gas fee estimates for multiple priority levels. */ gasFeeEstimates?: GasFeeEstimates; @@ -1344,3 +1350,7 @@ export type SubmitHistoryEntry = { /** The transaction parameters that were submitted. */ transaction: TransactionParams; }; + +export type InternalAccount = ReturnType< + AccountsController['getSelectedAccount'] +>; diff --git a/packages/transaction-controller/src/utils/validation.ts b/packages/transaction-controller/src/utils/validation.ts index 689243b3eb..3e725483fe 100644 --- a/packages/transaction-controller/src/utils/validation.ts +++ b/packages/transaction-controller/src/utils/validation.ts @@ -184,6 +184,18 @@ function validateParamFrom(from: string) { } } +/** + * Validates the recipient address in a transaction's parameters. + * + * @param to - The to property to validate. + * @throws Throws an error if the recipient address is invalid. + */ +export function validateParamTo(to?: string) { + if (!to || typeof to !== 'string') { + throw rpcErrors.invalidParams(`Invalid "to" address`); + } +} + /** * Validates input data for transactions. 
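(Editorial aside: one small pattern worth calling out from `types.ts` above — `InternalAccount` is now derived from `AccountsController['getSelectedAccount']` rather than imported from `@metamask/keyring-api`, which removes that dependency while keeping the type in sync. A generic illustration of the pattern, with made-up names:)

```ts
// `ExampleController` and its method are invented for this sketch.
class ExampleController {
  getSelectedAccount() {
    return {
      id: 'abc',
      address: '0x0000000000000000000000000000000000000000',
      type: 'eip155:eoa' as const,
    };
  }
}

// Derive the account shape from the controller's method instead of importing
// it from a separate package, so the two can never drift apart.
type ExampleAccount = ReturnType<ExampleController['getSelectedAccount']>;

const account: ExampleAccount = {
  id: 'abc',
  address: '0x0000000000000000000000000000000000000000',
  type: 'eip155:eoa',
};
```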
* diff --git a/packages/user-operation-controller/CHANGELOG.md b/packages/user-operation-controller/CHANGELOG.md index 1edad7b589..64739f9307 100644 --- a/packages/user-operation-controller/CHANGELOG.md +++ b/packages/user-operation-controller/CHANGELOG.md @@ -7,6 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [19.0.0] + +### Changed + +- **BREAKING:** Bump `@metamask/keyring-controller` peer dependency from `^18.0.0` to `^19.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) +- **BREAKING:** Bump `@metamask/transaction-controller` peer dependency from `^39.0.0` to `^40.0.0` ([#4195](https://github.com/MetaMask/core/pull/4956)) + ## [18.0.0] ### Changed @@ -268,7 +275,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Initial Release ([#3749](https://github.com/MetaMask/core/pull/3749)) -[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/user-operation-controller@18.0.0...HEAD +[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/user-operation-controller@19.0.0...HEAD +[19.0.0]: https://github.com/MetaMask/core/compare/@metamask/user-operation-controller@18.0.0...@metamask/user-operation-controller@19.0.0 [18.0.0]: https://github.com/MetaMask/core/compare/@metamask/user-operation-controller@17.0.0...@metamask/user-operation-controller@18.0.0 [17.0.0]: https://github.com/MetaMask/core/compare/@metamask/user-operation-controller@16.0.0...@metamask/user-operation-controller@17.0.0 [16.0.0]: https://github.com/MetaMask/core/compare/@metamask/user-operation-controller@15.0.1...@metamask/user-operation-controller@16.0.0 diff --git a/packages/user-operation-controller/package.json b/packages/user-operation-controller/package.json index 219eda7a18..cda601915f 100644 --- a/packages/user-operation-controller/package.json +++ b/packages/user-operation-controller/package.json @@ -1,6 +1,6 @@ { "name": "@metamask/user-operation-controller", - "version": "18.0.0", + "version": "19.0.0", "description": "Creates user operations and manages their life cycle", "keywords": [ "MetaMask", @@ -64,9 +64,9 @@ "@metamask/approval-controller": "^7.1.1", "@metamask/auto-changelog": "^3.4.4", "@metamask/gas-fee-controller": "^22.0.1", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/network-controller": "^22.0.2", - "@metamask/transaction-controller": "^39.0.0", + "@metamask/transaction-controller": "^40.1.0", "@types/jest": "^27.4.1", "deepmerge": "^4.2.2", "jest": "^27.5.1", @@ -78,9 +78,9 @@ "peerDependencies": { "@metamask/approval-controller": "^7.0.0", "@metamask/gas-fee-controller": "^22.0.0", - "@metamask/keyring-controller": "^18.0.0", + "@metamask/keyring-controller": "^19.0.0", "@metamask/network-controller": "^22.0.0", - "@metamask/transaction-controller": "^39.0.0" + "@metamask/transaction-controller": "^40.0.0" }, "engines": { "node": "^18.18 || >=20" diff --git a/yarn.lock b/yarn.lock index be82fa2fad..1c9d6915b2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2027,19 +2027,19 @@ __metadata: languageName: node linkType: hard -"@metamask/accounts-controller@npm:^19.0.0, @metamask/accounts-controller@workspace:packages/accounts-controller": +"@metamask/accounts-controller@npm:^20.0.0, @metamask/accounts-controller@workspace:packages/accounts-controller": version: 0.0.0-use.local resolution: "@metamask/accounts-controller@workspace:packages/accounts-controller" dependencies: "@ethereumjs/util": "npm:^8.1.0" 
"@metamask/auto-changelog": "npm:^3.4.4" "@metamask/base-controller": "npm:^7.0.2" - "@metamask/eth-snap-keyring": "npm:^4.3.6" - "@metamask/keyring-api": "npm:^8.1.3" - "@metamask/keyring-controller": "npm:^18.0.0" - "@metamask/snaps-controllers": "npm:^9.7.0" - "@metamask/snaps-sdk": "npm:^6.5.0" - "@metamask/snaps-utils": "npm:^8.1.1" + "@metamask/eth-snap-keyring": "npm:^5.0.1" + "@metamask/keyring-api": "npm:^10.1.0" + "@metamask/keyring-controller": "npm:^19.0.0" + "@metamask/snaps-controllers": "npm:^9.10.0" + "@metamask/snaps-sdk": "npm:^6.7.0" + "@metamask/snaps-utils": "npm:^8.3.0" "@metamask/utils": "npm:^10.0.0" "@types/jest": "npm:^27.4.1" "@types/readable-stream": "npm:^2.3.0" @@ -2053,7 +2053,7 @@ __metadata: typescript: "npm:~5.2.2" uuid: "npm:^8.3.2" peerDependencies: - "@metamask/keyring-controller": ^18.0.0 + "@metamask/keyring-controller": ^19.0.0 "@metamask/snaps-controllers": ^9.7.0 languageName: unknown linkType: soft @@ -2103,7 +2103,14 @@ __metadata: languageName: unknown linkType: soft -"@metamask/approval-controller@npm:^7.0.2, @metamask/approval-controller@npm:^7.1.1, @metamask/approval-controller@workspace:packages/approval-controller": +"@metamask/api-specs@npm:^0.10.12": + version: 0.10.12 + resolution: "@metamask/api-specs@npm:0.10.12" + checksum: 10/e592f27f350994688d3d54a8a8db16de033011ef665efe3283a77431914d8d69d1c3312fad33e4245b4984e1223b04c98da3d0a68c7f9577cf8290ba441c52ee + languageName: node + linkType: hard + +"@metamask/approval-controller@npm:^7.1.1, @metamask/approval-controller@workspace:packages/approval-controller": version: 0.0.0-use.local resolution: "@metamask/approval-controller@workspace:packages/approval-controller" dependencies: @@ -2128,12 +2135,13 @@ __metadata: resolution: "@metamask/assets-controllers@workspace:packages/assets-controllers" dependencies: "@ethereumjs/util": "npm:^8.1.0" + "@ethersproject/abi": "npm:^5.7.0" "@ethersproject/address": "npm:^5.7.0" "@ethersproject/bignumber": "npm:^5.7.0" "@ethersproject/contracts": "npm:^5.7.0" "@ethersproject/providers": "npm:^5.7.0" "@metamask/abi-utils": "npm:^2.0.3" - "@metamask/accounts-controller": "npm:^19.0.0" + "@metamask/accounts-controller": "npm:^20.0.0" "@metamask/approval-controller": "npm:^7.1.1" "@metamask/auto-changelog": "npm:^3.4.4" "@metamask/base-controller": "npm:^7.0.2" @@ -2141,12 +2149,12 @@ __metadata: "@metamask/controller-utils": "npm:^11.4.3" "@metamask/eth-query": "npm:^4.0.0" "@metamask/ethjs-provider-http": "npm:^0.3.0" - "@metamask/keyring-api": "npm:^8.1.3" - "@metamask/keyring-controller": "npm:^18.0.0" + "@metamask/keyring-api": "npm:^10.1.0" + "@metamask/keyring-controller": "npm:^19.0.0" "@metamask/metamask-eth-abis": "npm:^3.1.1" "@metamask/network-controller": "npm:^22.0.2" "@metamask/polling-controller": "npm:^12.0.1" - "@metamask/preferences-controller": "npm:^14.0.0" + "@metamask/preferences-controller": "npm:^15.0.0" "@metamask/rpc-errors": "npm:^7.0.1" "@metamask/utils": "npm:^10.0.0" "@types/bn.js": "npm:^5.1.5" @@ -2172,11 +2180,11 @@ __metadata: typescript: "npm:~5.2.2" uuid: "npm:^8.3.2" peerDependencies: - "@metamask/accounts-controller": ^19.0.0 + "@metamask/accounts-controller": ^20.0.0 "@metamask/approval-controller": ^7.0.0 - "@metamask/keyring-controller": ^18.0.0 + "@metamask/keyring-controller": ^19.0.0 "@metamask/network-controller": ^22.0.0 - "@metamask/preferences-controller": ^14.0.0 + "@metamask/preferences-controller": ^15.0.0 languageName: unknown linkType: soft @@ -2210,16 +2218,6 @@ __metadata: languageName: node linkType: 
hard -"@metamask/base-controller@npm:^6.0.2": - version: 6.0.3 - resolution: "@metamask/base-controller@npm:6.0.3" - dependencies: - "@metamask/utils": "npm:^9.1.0" - immer: "npm:^9.0.6" - checksum: 10/43e208627c673094e3b4a7766ef4df34cd5a9ec7f09721cc3e60123b69a22b82c68752b963d17f4ad925a01c6e5dc89f125cac33aeee4e90e0a8346a1d153aae - languageName: node - linkType: hard - "@metamask/base-controller@npm:^7.0.2, @metamask/base-controller@workspace:packages/base-controller": version: 0.0.0-use.local resolution: "@metamask/base-controller@workspace:packages/base-controller" @@ -2284,10 +2282,10 @@ __metadata: "@metamask/auto-changelog": "npm:^3.4.4" "@metamask/base-controller": "npm:^7.0.2" "@metamask/chain-api": "npm:^0.1.0" - "@metamask/keyring-api": "npm:^8.1.3" - "@metamask/snaps-controllers": "npm:^9.7.0" - "@metamask/snaps-sdk": "npm:^6.5.0" - "@metamask/snaps-utils": "npm:^8.1.1" + "@metamask/keyring-api": "npm:^10.1.0" + "@metamask/snaps-controllers": "npm:^9.10.0" + "@metamask/snaps-sdk": "npm:^6.7.0" + "@metamask/snaps-utils": "npm:^8.3.0" "@metamask/utils": "npm:^10.0.0" "@types/jest": "npm:^27.4.1" "@types/readable-stream": "npm:^2.3.0" @@ -2529,6 +2527,19 @@ __metadata: languageName: node linkType: hard +"@metamask/eth-json-rpc-filters@npm:^7.0.0": + version: 7.0.1 + resolution: "@metamask/eth-json-rpc-filters@npm:7.0.1" + dependencies: + "@metamask/eth-query": "npm:^4.0.0" + "@metamask/json-rpc-engine": "npm:^8.0.2" + "@metamask/safe-event-emitter": "npm:^3.0.0" + async-mutex: "npm:^0.5.0" + pify: "npm:^5.0.0" + checksum: 10/5200f75cee48dfd79deba5e4f1b16ff6827e606da617891f5cb7b59c43ae4ac8420cb9a6a9ca31705c47d2c3d32a3754e052b30f61fd293cc37f009c4fe20c12 + languageName: node + linkType: hard + "@metamask/eth-json-rpc-infura@npm:^10.0.0": version: 10.0.0 resolution: "@metamask/eth-json-rpc-infura@npm:10.0.0" @@ -2635,22 +2646,22 @@ __metadata: languageName: node linkType: hard -"@metamask/eth-snap-keyring@npm:^4.3.6": - version: 4.3.6 - resolution: "@metamask/eth-snap-keyring@npm:4.3.6" +"@metamask/eth-snap-keyring@npm:^5.0.1": + version: 5.0.1 + resolution: "@metamask/eth-snap-keyring@npm:5.0.1" dependencies: "@ethereumjs/tx": "npm:^4.2.0" - "@metamask/eth-sig-util": "npm:^7.0.3" - "@metamask/snaps-controllers": "npm:^9.7.0" - "@metamask/snaps-sdk": "npm:^6.5.1" - "@metamask/snaps-utils": "npm:^7.8.1" + "@metamask/eth-sig-util": "npm:^8.0.0" + "@metamask/snaps-controllers": "npm:^9.10.0" + "@metamask/snaps-sdk": "npm:^6.7.0" + "@metamask/snaps-utils": "npm:^8.3.0" "@metamask/superstruct": "npm:^3.1.0" "@metamask/utils": "npm:^9.2.1" "@types/uuid": "npm:^9.0.8" uuid: "npm:^9.0.1" peerDependencies: - "@metamask/keyring-api": ^8.1.3 - checksum: 10/378dce125ba9e38b9ba7d9b7124383b4fd8d2782207dc69e1ae9e262beb83f22044eae5200986d4c353de29e5283c289e56b3acb88c8971a63f9365bdde3d5b4 + "@metamask/keyring-api": ^10.1.0 + checksum: 10/4d9d700b7c2ecc1b17e92f716f7aeb04bbd03836601b5d37f639bed7fba4d5f00bafadf5359d2416c319cdf18eb2f9417c7353654737af87a6e8579d5e5bab79 languageName: node linkType: hard @@ -2868,18 +2879,18 @@ __metadata: languageName: unknown linkType: soft -"@metamask/json-rpc-engine@npm:^9.0.1, @metamask/json-rpc-engine@npm:^9.0.2": - version: 9.0.3 - resolution: "@metamask/json-rpc-engine@npm:9.0.3" +"@metamask/json-rpc-engine@npm:^8.0.2": + version: 8.0.2 + resolution: "@metamask/json-rpc-engine@npm:8.0.2" dependencies: - "@metamask/rpc-errors": "npm:^6.3.1" + "@metamask/rpc-errors": "npm:^6.2.1" "@metamask/safe-event-emitter": "npm:^3.0.0" - "@metamask/utils": "npm:^9.1.0" - checksum: 
10/23a3cafb5869f6d5867105e3570ac4e214a72dda0b4b428cde6bae8856ec838c822b174f8cea054108122531d662cf93a65e92e1ee07da0485d5d0c0e5a1fca6 + "@metamask/utils": "npm:^8.3.0" + checksum: 10/f088f4b648b9b55875b56e8237853e7282f13302a9db6a1f9bba06314dfd6cd0a23b3d27f8fde05a157b97ebb03b67bc2699ba455c99553dfb2ecccd73ab3474 languageName: node linkType: hard -"@metamask/json-rpc-middleware-stream@npm:^8.0.1, @metamask/json-rpc-middleware-stream@npm:^8.0.2, @metamask/json-rpc-middleware-stream@workspace:packages/json-rpc-middleware-stream": +"@metamask/json-rpc-middleware-stream@npm:^8.0.5, @metamask/json-rpc-middleware-stream@workspace:packages/json-rpc-middleware-stream": version: 0.0.0-use.local resolution: "@metamask/json-rpc-middleware-stream@workspace:packages/json-rpc-middleware-stream" dependencies: @@ -2915,23 +2926,24 @@ __metadata: languageName: node linkType: hard -"@metamask/keyring-api@npm:^8.1.3": - version: 8.1.3 - resolution: "@metamask/keyring-api@npm:8.1.3" +"@metamask/keyring-api@npm:^10.1.0": + version: 10.1.0 + resolution: "@metamask/keyring-api@npm:10.1.0" dependencies: - "@metamask/snaps-sdk": "npm:^6.5.1" + "@metamask/snaps-sdk": "npm:^6.7.0" "@metamask/superstruct": "npm:^3.1.0" "@metamask/utils": "npm:^9.2.1" "@types/uuid": "npm:^9.0.8" bech32: "npm:^2.0.0" uuid: "npm:^9.0.1" + webextension-polyfill: "npm:^0.12.0" peerDependencies: - "@metamask/providers": ^17.2.0 - checksum: 10/9857b6286760d22b1b7102ea8bdf03ebf56c71e9f0adee19a2230def6b7a9230561c1a3bfcb308735b79ab9a5afa9afd07a1617c1d165f63d193cd6a6b6e7a15 + "@metamask/providers": ^18.1.0 + checksum: 10/de22b9f5f3aecc290210fa78161e157aa8358f8dad421a093c9f6dbe35c4755067472a732f10d1ddbfba789e871c64edd8ea1c4c7316a392b214a187efd46ebe languageName: node linkType: hard -"@metamask/keyring-controller@npm:^18.0.0, @metamask/keyring-controller@workspace:packages/keyring-controller": +"@metamask/keyring-controller@npm:^19.0.0, @metamask/keyring-controller@workspace:packages/keyring-controller": version: 0.0.0-use.local resolution: "@metamask/keyring-controller@workspace:packages/keyring-controller" dependencies: @@ -2947,7 +2959,7 @@ __metadata: "@metamask/eth-hd-keyring": "npm:^7.0.4" "@metamask/eth-sig-util": "npm:^8.0.0" "@metamask/eth-simple-keyring": "npm:^6.0.5" - "@metamask/keyring-api": "npm:^8.1.3" + "@metamask/keyring-api": "npm:^10.1.0" "@metamask/message-manager": "npm:^11.0.1" "@metamask/scure-bip39": "npm:^2.1.1" "@metamask/utils": "npm:^10.0.0" @@ -3018,14 +3030,25 @@ __metadata: version: 0.0.0-use.local resolution: "@metamask/multichain@workspace:packages/multichain" dependencies: + "@metamask/api-specs": "npm:^0.10.12" "@metamask/auto-changelog": "npm:^3.4.4" + "@metamask/controller-utils": "npm:^11.4.3" + "@metamask/eth-json-rpc-filters": "npm:^7.0.0" + "@metamask/network-controller": "npm:^22.0.2" + "@metamask/permission-controller": "npm:^11.0.3" + "@metamask/rpc-errors": "npm:^7.0.1" + "@metamask/utils": "npm:^10.0.0" "@types/jest": "npm:^27.4.1" deepmerge: "npm:^4.2.2" jest: "npm:^27.5.1" + lodash: "npm:^4.17.21" ts-jest: "npm:^27.1.4" typedoc: "npm:^0.24.8" typedoc-plugin-missing-exports: "npm:^2.0.0" typescript: "npm:~5.2.2" + peerDependencies: + "@metamask/network-controller": ^22.0.0 + "@metamask/permission-controller": ^11.0.0 languageName: unknown linkType: soft @@ -3126,8 +3149,8 @@ __metadata: "@metamask/auto-changelog": "npm:^3.4.4" "@metamask/base-controller": "npm:^7.0.2" "@metamask/controller-utils": "npm:^11.4.3" - "@metamask/keyring-controller": "npm:^18.0.0" - "@metamask/profile-sync-controller": 
"npm:^1.0.0" + "@metamask/keyring-controller": "npm:^19.0.0" + "@metamask/profile-sync-controller": "npm:^2.0.0" "@metamask/utils": "npm:^10.0.0" "@types/jest": "npm:^27.4.1" "@types/readable-stream": "npm:^2.3.0" @@ -3145,8 +3168,8 @@ __metadata: typescript: "npm:~5.2.2" uuid: "npm:^8.3.2" peerDependencies: - "@metamask/keyring-controller": ^18.0.0 - "@metamask/profile-sync-controller": ^1.0.0 + "@metamask/keyring-controller": ^19.0.0 + "@metamask/profile-sync-controller": ^2.0.0 languageName: unknown linkType: soft @@ -3180,7 +3203,7 @@ __metadata: languageName: node linkType: hard -"@metamask/permission-controller@npm:^11.0.0, @metamask/permission-controller@npm:^11.0.3, @metamask/permission-controller@workspace:packages/permission-controller": +"@metamask/permission-controller@npm:^11.0.3, @metamask/permission-controller@workspace:packages/permission-controller": version: 0.0.0-use.local resolution: "@metamask/permission-controller@workspace:packages/permission-controller" dependencies: @@ -3287,14 +3310,14 @@ __metadata: languageName: node linkType: hard -"@metamask/preferences-controller@npm:^14.0.0, @metamask/preferences-controller@workspace:packages/preferences-controller": +"@metamask/preferences-controller@npm:^15.0.0, @metamask/preferences-controller@workspace:packages/preferences-controller": version: 0.0.0-use.local resolution: "@metamask/preferences-controller@workspace:packages/preferences-controller" dependencies: "@metamask/auto-changelog": "npm:^3.4.4" "@metamask/base-controller": "npm:^7.0.2" "@metamask/controller-utils": "npm:^11.4.3" - "@metamask/keyring-controller": "npm:^18.0.0" + "@metamask/keyring-controller": "npm:^19.0.0" "@types/jest": "npm:^27.4.1" deepmerge: "npm:^4.2.2" jest: "npm:^27.5.1" @@ -3304,24 +3327,24 @@ __metadata: typedoc-plugin-missing-exports: "npm:^2.0.0" typescript: "npm:~5.2.2" peerDependencies: - "@metamask/keyring-controller": ^18.0.0 + "@metamask/keyring-controller": ^19.0.0 languageName: unknown linkType: soft -"@metamask/profile-sync-controller@npm:^1.0.0, @metamask/profile-sync-controller@workspace:packages/profile-sync-controller": +"@metamask/profile-sync-controller@npm:^2.0.0, @metamask/profile-sync-controller@workspace:packages/profile-sync-controller": version: 0.0.0-use.local resolution: "@metamask/profile-sync-controller@workspace:packages/profile-sync-controller" dependencies: "@lavamoat/allow-scripts": "npm:^3.0.4" - "@metamask/accounts-controller": "npm:^19.0.0" + "@metamask/accounts-controller": "npm:^20.0.0" "@metamask/auto-changelog": "npm:^3.4.4" "@metamask/base-controller": "npm:^7.0.2" - "@metamask/keyring-api": "npm:^8.1.3" - "@metamask/keyring-controller": "npm:^18.0.0" + "@metamask/keyring-api": "npm:^10.1.0" + "@metamask/keyring-controller": "npm:^19.0.0" "@metamask/network-controller": "npm:^22.0.2" - "@metamask/snaps-controllers": "npm:^9.7.0" - "@metamask/snaps-sdk": "npm:^6.5.0" - "@metamask/snaps-utils": "npm:^8.1.1" + "@metamask/snaps-controllers": "npm:^9.10.0" + "@metamask/snaps-sdk": "npm:^6.7.0" + "@metamask/snaps-utils": "npm:^8.3.0" "@noble/ciphers": "npm:^0.5.2" "@noble/hashes": "npm:^1.4.0" "@types/jest": "npm:^27.4.1" @@ -3338,23 +3361,23 @@ __metadata: typedoc-plugin-missing-exports: "npm:^2.0.0" typescript: "npm:~5.2.2" peerDependencies: - "@metamask/accounts-controller": ^19.0.0 - "@metamask/keyring-controller": ^18.0.0 + "@metamask/accounts-controller": ^20.0.0 + "@metamask/keyring-controller": ^19.0.0 "@metamask/network-controller": ^22.0.0 - "@metamask/snaps-controllers": ^9.7.0 + 
"@metamask/snaps-controllers": ^9.10.0 languageName: unknown linkType: soft -"@metamask/providers@npm:^17.1.2": - version: 17.1.2 - resolution: "@metamask/providers@npm:17.1.2" +"@metamask/providers@npm:^18.1.1": + version: 18.1.1 + resolution: "@metamask/providers@npm:18.1.1" dependencies: - "@metamask/json-rpc-engine": "npm:^9.0.1" - "@metamask/json-rpc-middleware-stream": "npm:^8.0.1" + "@metamask/json-rpc-engine": "npm:^10.0.1" + "@metamask/json-rpc-middleware-stream": "npm:^8.0.5" "@metamask/object-multiplex": "npm:^2.0.0" - "@metamask/rpc-errors": "npm:^6.3.1" + "@metamask/rpc-errors": "npm:^7.0.1" "@metamask/safe-event-emitter": "npm:^3.1.1" - "@metamask/utils": "npm:^9.0.0" + "@metamask/utils": "npm:^10.0.0" detect-browser: "npm:^5.2.0" extension-port-stream: "npm:^4.1.0" fast-deep-equal: "npm:^3.1.3" @@ -3362,7 +3385,7 @@ __metadata: readable-stream: "npm:^3.6.2" peerDependencies: webextension-polyfill: ^0.10.0 || ^0.11.0 || ^0.12.0 - checksum: 10/bf555f9774e340d4497c09c980094e759a198f11c5a78b403e639cf01904b9ec3b19a5e9f53567465dd8739da4138e2021ac9a404a99b1a6022add12a4b19a31 + checksum: 10/dca428d84e490343d85921d4fb09216a0b64be59a036d7b4f7b5ca4e2581c29a4106d58ff9dfe0650dc2b9387dd2adad508fc61073a9fda8ebde8ee3a5137abe languageName: node linkType: hard @@ -3414,7 +3437,7 @@ __metadata: languageName: unknown linkType: soft -"@metamask/rpc-errors@npm:^6.3.1": +"@metamask/rpc-errors@npm:^6.2.1": version: 6.3.1 resolution: "@metamask/rpc-errors@npm:6.3.1" dependencies: @@ -3488,7 +3511,7 @@ __metadata: "@metamask/base-controller": "npm:^7.0.2" "@metamask/controller-utils": "npm:^11.4.3" "@metamask/eth-sig-util": "npm:^8.0.0" - "@metamask/keyring-controller": "npm:^18.0.0" + "@metamask/keyring-controller": "npm:^19.0.0" "@metamask/logging-controller": "npm:^6.0.2" "@metamask/network-controller": "npm:^22.0.2" "@metamask/utils": "npm:^10.0.0" @@ -3504,19 +3527,12 @@ __metadata: uuid: "npm:^8.3.2" peerDependencies: "@metamask/approval-controller": ^7.0.0 - "@metamask/keyring-controller": ^18.0.0 + "@metamask/keyring-controller": ^19.0.0 "@metamask/logging-controller": ^6.0.0 "@metamask/network-controller": ^22.0.0 languageName: unknown linkType: soft -"@metamask/slip44@npm:^3.1.0": - version: 3.1.0 - resolution: "@metamask/slip44@npm:3.1.0" - checksum: 10/83f902c455468f1ec252d0554cd4ebf8da1fc9a27ec7199b81e265e5e8710fad86eaa71d86f24500f9db6626007ad71b1380b239e2104e7e558a061393b066fa - languageName: node - linkType: hard - "@metamask/slip44@npm:^4.0.0": version: 4.0.0 resolution: "@metamask/slip44@npm:4.0.0" @@ -3524,24 +3540,24 @@ __metadata: languageName: node linkType: hard -"@metamask/snaps-controllers@npm:^9.7.0": - version: 9.7.0 - resolution: "@metamask/snaps-controllers@npm:9.7.0" +"@metamask/snaps-controllers@npm:^9.10.0": + version: 9.12.0 + resolution: "@metamask/snaps-controllers@npm:9.12.0" dependencies: - "@metamask/approval-controller": "npm:^7.0.2" - "@metamask/base-controller": "npm:^6.0.2" - "@metamask/json-rpc-engine": "npm:^9.0.2" - "@metamask/json-rpc-middleware-stream": "npm:^8.0.2" + "@metamask/approval-controller": "npm:^7.1.1" + "@metamask/base-controller": "npm:^7.0.2" + "@metamask/json-rpc-engine": "npm:^10.0.1" + "@metamask/json-rpc-middleware-stream": "npm:^8.0.5" "@metamask/object-multiplex": "npm:^2.0.0" - "@metamask/permission-controller": "npm:^11.0.0" + "@metamask/permission-controller": "npm:^11.0.3" "@metamask/phishing-controller": "npm:^12.0.2" "@metamask/post-message-stream": "npm:^8.1.1" - "@metamask/rpc-errors": "npm:^6.3.1" - "@metamask/snaps-registry": 
"npm:^3.2.1" - "@metamask/snaps-rpc-methods": "npm:^11.1.1" - "@metamask/snaps-sdk": "npm:^6.5.0" - "@metamask/snaps-utils": "npm:^8.1.1" - "@metamask/utils": "npm:^9.2.1" + "@metamask/rpc-errors": "npm:^7.0.1" + "@metamask/snaps-registry": "npm:^3.2.2" + "@metamask/snaps-rpc-methods": "npm:^11.5.1" + "@metamask/snaps-sdk": "npm:^6.10.0" + "@metamask/snaps-utils": "npm:^8.5.0" + "@metamask/utils": "npm:^10.0.0" "@xstate/fsm": "npm:^2.0.0" browserify-zlib: "npm:^0.2.0" concat-stream: "npm:^2.0.0" @@ -3551,103 +3567,73 @@ __metadata: nanoid: "npm:^3.1.31" readable-stream: "npm:^3.6.2" readable-web-to-node-stream: "npm:^3.0.2" + semver: "npm:^7.5.4" tar-stream: "npm:^3.1.7" peerDependencies: - "@metamask/snaps-execution-environments": ^6.7.1 + "@metamask/snaps-execution-environments": ^6.9.2 peerDependenciesMeta: "@metamask/snaps-execution-environments": optional: true - checksum: 10/8a353819e60330ef3e338a40b1115d4c830b92b1cc0c92afb2b34bf46fbc906e6da5f905654e1d486cacd40b7025ec74d3cd01cb935090035ce9f1021ce5469f + checksum: 10/8d411ff2cfd43e62fe780092e935a1d977379488407b56cca1390edfa9408871cbaf3599f6e6ee999340d46fd3650f225a3270ceec9492c6f2dc4d93538c25ae languageName: node linkType: hard -"@metamask/snaps-registry@npm:^3.2.1": - version: 3.2.1 - resolution: "@metamask/snaps-registry@npm:3.2.1" +"@metamask/snaps-registry@npm:^3.2.2": + version: 3.2.2 + resolution: "@metamask/snaps-registry@npm:3.2.2" dependencies: "@metamask/superstruct": "npm:^3.1.0" - "@metamask/utils": "npm:^9.0.0" + "@metamask/utils": "npm:^10.0.0" "@noble/curves": "npm:^1.2.0" "@noble/hashes": "npm:^1.3.2" - checksum: 10/b2a413f27db9b5701d3773017035ee1e153734a25363e3877f44be4a70f51c48d77ad0ac8f1e96a7d732d2079a4b259896f361b3cba1ae0bf0bbc1075406f178 + checksum: 10/ca8239e838bbb913435e166136bbc9bd7222c4bd87b1525fa7ae3cdf2e0b868b5d4d90a67d1ed49633d566bdef9243abdbf5f5937b85a85d24184087f555813e languageName: node linkType: hard -"@metamask/snaps-rpc-methods@npm:^11.1.1": - version: 11.1.1 - resolution: "@metamask/snaps-rpc-methods@npm:11.1.1" +"@metamask/snaps-rpc-methods@npm:^11.5.1": + version: 11.5.1 + resolution: "@metamask/snaps-rpc-methods@npm:11.5.1" dependencies: "@metamask/key-tree": "npm:^9.1.2" - "@metamask/permission-controller": "npm:^11.0.0" - "@metamask/rpc-errors": "npm:^6.3.1" - "@metamask/snaps-sdk": "npm:^6.5.0" - "@metamask/snaps-utils": "npm:^8.1.1" + "@metamask/permission-controller": "npm:^11.0.3" + "@metamask/rpc-errors": "npm:^7.0.1" + "@metamask/snaps-sdk": "npm:^6.10.0" + "@metamask/snaps-utils": "npm:^8.5.0" "@metamask/superstruct": "npm:^3.1.0" - "@metamask/utils": "npm:^9.2.1" + "@metamask/utils": "npm:^10.0.0" "@noble/hashes": "npm:^1.3.1" - checksum: 10/e23279dabc6f4ffe2c6c4a7003a624cd5e79b558d7981ec12c23e54a5da25cb7be9bc7bddfa8b2ce84af28a89b42076a2c14ab004b7a976a4426bf1e1de71b5b - languageName: node - linkType: hard - -"@metamask/snaps-sdk@npm:^6.1.0, @metamask/snaps-sdk@npm:^6.5.0, @metamask/snaps-sdk@npm:^6.5.1": - version: 6.5.1 - resolution: "@metamask/snaps-sdk@npm:6.5.1" - dependencies: - "@metamask/key-tree": "npm:^9.1.2" - "@metamask/providers": "npm:^17.1.2" - "@metamask/rpc-errors": "npm:^6.3.1" - "@metamask/superstruct": "npm:^3.1.0" - "@metamask/utils": "npm:^9.2.1" - checksum: 10/7831fb2ca61a32ad43e971de9307b221f6bd2f65c84a3286f350cfdd2396166c58db6cd2fac9711654a211c8dc2049e591a79ab720b3f5ad562e434f75e95d32 + checksum: 10/0f999a5dd64f1b1123366f448ae833f0e95a415791600bb535959ba67d2269fbe3c4504d47f04db71bafa79a9a87d6b832fb2e2b5ef29567078c95bce2638f35 languageName: node linkType: hard 
-"@metamask/snaps-utils@npm:^7.8.1": - version: 7.8.1 - resolution: "@metamask/snaps-utils@npm:7.8.1" +"@metamask/snaps-sdk@npm:^6.10.0, @metamask/snaps-sdk@npm:^6.7.0": + version: 6.10.0 + resolution: "@metamask/snaps-sdk@npm:6.10.0" dependencies: - "@babel/core": "npm:^7.23.2" - "@babel/types": "npm:^7.23.0" - "@metamask/base-controller": "npm:^6.0.2" "@metamask/key-tree": "npm:^9.1.2" - "@metamask/permission-controller": "npm:^11.0.0" - "@metamask/rpc-errors": "npm:^6.3.1" - "@metamask/slip44": "npm:^3.1.0" - "@metamask/snaps-registry": "npm:^3.2.1" - "@metamask/snaps-sdk": "npm:^6.1.0" + "@metamask/providers": "npm:^18.1.1" + "@metamask/rpc-errors": "npm:^7.0.1" "@metamask/superstruct": "npm:^3.1.0" - "@metamask/utils": "npm:^9.1.0" - "@noble/hashes": "npm:^1.3.1" - "@scure/base": "npm:^1.1.1" - chalk: "npm:^4.1.2" - cron-parser: "npm:^4.5.0" - fast-deep-equal: "npm:^3.1.3" - fast-json-stable-stringify: "npm:^2.1.0" - fast-xml-parser: "npm:^4.3.4" - marked: "npm:^12.0.1" - rfdc: "npm:^1.3.0" - semver: "npm:^7.5.4" - ses: "npm:^1.1.0" - validate-npm-package-name: "npm:^5.0.0" - checksum: 10/572108aafbad970910ffb3605cf9eb4675ede0d69ff2bd37515da7f071de2065a55c73d6dc44dbe70bbd9c3ff0dfe29d40fd16badd925a4b8504db293265ca2f + "@metamask/utils": "npm:^10.0.0" + checksum: 10/02f04536328a64ff1e9e48fb6b109698d6d83f42af5666a9758ccb1e7a1e67c0c2e296ef2fef419dd3d1c8f26bbf30b9f31911a1baa66f044f21cd0ecb7a11a7 languageName: node linkType: hard -"@metamask/snaps-utils@npm:^8.1.1": - version: 8.1.1 - resolution: "@metamask/snaps-utils@npm:8.1.1" +"@metamask/snaps-utils@npm:^8.3.0, @metamask/snaps-utils@npm:^8.5.0": + version: 8.5.2 + resolution: "@metamask/snaps-utils@npm:8.5.2" dependencies: "@babel/core": "npm:^7.23.2" "@babel/types": "npm:^7.23.0" - "@metamask/base-controller": "npm:^6.0.2" + "@metamask/base-controller": "npm:^7.0.2" "@metamask/key-tree": "npm:^9.1.2" - "@metamask/permission-controller": "npm:^11.0.0" - "@metamask/rpc-errors": "npm:^6.3.1" + "@metamask/permission-controller": "npm:^11.0.3" + "@metamask/rpc-errors": "npm:^7.0.1" "@metamask/slip44": "npm:^4.0.0" - "@metamask/snaps-registry": "npm:^3.2.1" - "@metamask/snaps-sdk": "npm:^6.5.0" + "@metamask/snaps-registry": "npm:^3.2.2" + "@metamask/snaps-sdk": "npm:^6.10.0" "@metamask/superstruct": "npm:^3.1.0" - "@metamask/utils": "npm:^9.2.1" + "@metamask/utils": "npm:^10.0.0" "@noble/hashes": "npm:^1.3.1" "@scure/base": "npm:^1.1.1" chalk: "npm:^4.1.2" @@ -3660,7 +3646,7 @@ __metadata: semver: "npm:^7.5.4" ses: "npm:^1.1.0" validate-npm-package-name: "npm:^5.0.0" - checksum: 10/f4ceb52a1f9578993c88c82a67f4f041309af51c83ff5caa3fed080f36b54d14ea7da807ce1cf19a13600dd0e77c51af70398e8c7bb78f0ba99a037f4d22610f + checksum: 10/e5d1344f948473e82d71007d2570272073cf070f40aa7746692a6d5e6f02cfce66a747cf50f439d32b29a3f6588486182453b26973f0d0c1d9f47914591d5790 languageName: node linkType: hard @@ -3678,7 +3664,7 @@ __metadata: languageName: node linkType: hard -"@metamask/transaction-controller@npm:^39.0.0, @metamask/transaction-controller@workspace:packages/transaction-controller": +"@metamask/transaction-controller@npm:^40.1.0, @metamask/transaction-controller@workspace:packages/transaction-controller": version: 0.0.0-use.local resolution: "@metamask/transaction-controller@workspace:packages/transaction-controller" dependencies: @@ -3689,7 +3675,7 @@ __metadata: "@ethersproject/abi": "npm:^5.7.0" "@ethersproject/contracts": "npm:^5.7.0" "@ethersproject/providers": "npm:^5.7.0" - "@metamask/accounts-controller": "npm:^19.0.0" + 
"@metamask/accounts-controller": "npm:^20.0.0" "@metamask/approval-controller": "npm:^7.1.1" "@metamask/auto-changelog": "npm:^3.4.4" "@metamask/base-controller": "npm:^7.0.2" @@ -3698,7 +3684,6 @@ __metadata: "@metamask/eth-query": "npm:^4.0.0" "@metamask/ethjs-provider-http": "npm:^0.3.0" "@metamask/gas-fee-controller": "npm:^22.0.1" - "@metamask/keyring-api": "npm:^8.1.3" "@metamask/metamask-eth-abis": "npm:^3.1.1" "@metamask/network-controller": "npm:^22.0.2" "@metamask/nonce-tracker": "npm:^6.0.0" @@ -3724,7 +3709,7 @@ __metadata: uuid: "npm:^8.3.2" peerDependencies: "@babel/runtime": ^7.23.9 - "@metamask/accounts-controller": ^19.0.0 + "@metamask/accounts-controller": ^20.0.0 "@metamask/approval-controller": ^7.0.0 "@metamask/gas-fee-controller": ^22.0.0 "@metamask/network-controller": ^22.0.0 @@ -3741,12 +3726,12 @@ __metadata: "@metamask/controller-utils": "npm:^11.4.3" "@metamask/eth-query": "npm:^4.0.0" "@metamask/gas-fee-controller": "npm:^22.0.1" - "@metamask/keyring-controller": "npm:^18.0.0" + "@metamask/keyring-controller": "npm:^19.0.0" "@metamask/network-controller": "npm:^22.0.2" "@metamask/polling-controller": "npm:^12.0.1" "@metamask/rpc-errors": "npm:^7.0.1" "@metamask/superstruct": "npm:^3.1.0" - "@metamask/transaction-controller": "npm:^39.0.0" + "@metamask/transaction-controller": "npm:^40.1.0" "@metamask/utils": "npm:^10.0.0" "@types/jest": "npm:^27.4.1" bn.js: "npm:^5.2.1" @@ -3762,9 +3747,9 @@ __metadata: peerDependencies: "@metamask/approval-controller": ^7.0.0 "@metamask/gas-fee-controller": ^22.0.0 - "@metamask/keyring-controller": ^18.0.0 + "@metamask/keyring-controller": ^19.0.0 "@metamask/network-controller": ^22.0.0 - "@metamask/transaction-controller": ^39.0.0 + "@metamask/transaction-controller": ^40.0.0 languageName: unknown linkType: soft @@ -3785,7 +3770,7 @@ __metadata: languageName: node linkType: hard -"@metamask/utils@npm:^8.2.0": +"@metamask/utils@npm:^8.2.0, @metamask/utils@npm:^8.3.0": version: 8.5.0 resolution: "@metamask/utils@npm:8.5.0" dependencies: @@ -12497,7 +12482,7 @@ __metadata: languageName: node linkType: hard -"webextension-polyfill@npm:>=0.10.0 <1.0": +"webextension-polyfill@npm:>=0.10.0 <1.0, webextension-polyfill@npm:^0.12.0": version: 0.12.0 resolution: "webextension-polyfill@npm:0.12.0" checksum: 10/77e648b958b573ef075e75a0c180e2bbd74dee17b3145e86d21fcbb168c4999e4a311654fe634b8178997bee9b35ea5808d8d3d3e5ff2ad138f197f4f0ea75d9