Make transpilation setup local to this repo (#4041)

Co-authored-by: Ernesto García <ernestognw@gmail.com>
This commit is contained in:
Francisco
2023-05-09 19:52:23 +01:00
committed by GitHub
parent 34d926dd7e
commit 51294b7480
19 changed files with 894 additions and 162 deletions

View File

@ -14,7 +14,6 @@ concurrency:
jobs:
lint:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -37,19 +36,36 @@ jobs:
- name: Check linearisation of the inheritance graph
run: npm run test:inheritance
- name: Check procedurally generated contracts are up-to-date
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
run: npm run test:generation
- name: Compare gas costs
uses: ./.github/actions/gas-compare
with:
token: ${{ github.token }}
tests-upgradeable:
runs-on: ubuntu-latest
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # Include history so patch conflicts are resolved automatically
- name: Set up environment
uses: ./.github/actions/setup
- name: Transpile to upgradeable
run: bash scripts/upgradeable/transpile.sh
- name: Run tests
run: npm run test
env:
NODE_OPTIONS: --max_old_space_size=4096
- name: Check linearisation of the inheritance graph
run: npm run test:inheritance
- name: Check storage layout
uses: ./.github/actions/storage-layout
with:
token: ${{ github.token }}
foundry-tests:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
tests-foundry:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -63,7 +79,6 @@ jobs:
run: forge test -vv
coverage:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -77,7 +92,6 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
slither:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -89,7 +103,6 @@ jobs:
node-version: 18.15
codespell:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

View File

@ -1,4 +1,4 @@
name: Upgradeable Trigger
name: transpile upgradeable
on:
push:
@ -7,17 +7,24 @@ on:
- release-v*
jobs:
trigger:
transpile:
environment: push-upgradeable
runs-on: ubuntu-latest
steps:
- id: app
uses: getsentry/action-github-app-token@v2
- uses: actions/checkout@v3
with:
app_id: ${{ secrets.UPGRADEABLE_APP_ID }}
private_key: ${{ secrets.UPGRADEABLE_APP_PK }}
- run: |
curl -X POST \
https://api.github.com/repos/OpenZeppelin/openzeppelin-contracts-upgradeable/dispatches \
-H 'Accept: application/vnd.github.v3+json' \
-H 'Authorization: token ${{ steps.app.outputs.token }}' \
-d '{ "event_type": "Update", "client_payload": { "ref": "${{ github.ref }}" } }'
repository: OpenZeppelin/openzeppelin-contracts-upgradeable
fetch-depth: 0
token: ${{ secrets.GH_TOKEN_UPGRADEABLE }}
- name: Fetch current non-upgradeable branch
run: |
git fetch "https://github.com/${{ github.repository }}.git" "$REF"
git checkout FETCH_HEAD
env:
REF: ${{ github.ref }}
- name: Set up environment
uses: ./.github/actions/setup
- run: bash scripts/git-user-config.sh
- name: Transpile to upgradeable
run: bash scripts/upgradeable/transpile-onto.sh ${{ github.ref_name }} origin/${{ github.ref_name }}
- run: git push origin ${{ github.ref_name }}

View File

@ -44,14 +44,14 @@ abstract contract EIP712 is IERC5267 {
uint256 private immutable _cachedChainId;
address private immutable _cachedThis;
bytes32 private immutable _hashedName;
bytes32 private immutable _hashedVersion;
ShortString private immutable _name;
ShortString private immutable _version;
string private _nameFallback;
string private _versionFallback;
bytes32 private immutable _hashedName;
bytes32 private immutable _hashedVersion;
/**
* @dev Initializes the domain separator and parameter caches.
*

View File

@ -89,10 +89,11 @@ module.exports = {
},
},
exposed: {
initializers: true,
exclude: [
'vendor/**/*',
// overflow clash
'utils/Timers.sol',
// Exclude Timers from hardhat-exposed because its overloaded functions are not transformed correctly
'utils/Timers{,Upgradeable}.sol',
],
},
docgen: require('./docs/config'),

24
hardhat/env-artifacts.js Normal file
View File

@ -0,0 +1,24 @@
const { HardhatError } = require('hardhat/internal/core/errors');

// Patches `artifacts.require(X)` so that it first tries the transpiled variants
// (XUpgradeableWithInit, then XUpgradeable) before falling back to X itself.
// This allows us to run the same test suite on both the original and the transpiled and renamed Upgradeable contracts.
extendEnvironment(env => {
  const originalRequire = env.artifacts.require;
  const suffixes = ['UpgradeableWithInit', 'Upgradeable', ''];

  env.artifacts.require = name => {
    for (const suffix of suffixes) {
      try {
        return originalRequire(name + suffix);
      } catch (e) {
        // HH700: Artifact not found - from https://hardhat.org/hardhat-runner/docs/errors#HH700
        // Any other error, or a miss on the bare name (last attempt), is fatal.
        const isNotFound = HardhatError.isHardhatError(e) && e.number === 700;
        if (!isNotFound || suffix === '') {
          throw e;
        }
      }
    }
    // The '' suffix attempt either returns or throws, so we can never get here.
    throw new Error('Unreachable');
  };
});

View File

@ -0,0 +1,35 @@
const { internalTask } = require('hardhat/config');
const { TASK_TEST_GET_TEST_FILES } = require('hardhat/builtin-tasks/task-names');

// Overrides the test-file discovery of `hardhat test` so that, once the
// transpiler has removed the proxy contracts for upgradeability, the
// corresponding proxy test files are skipped as well.
internalTask(TASK_TEST_GET_TEST_FILES).setAction(async ({ testFiles }, { config }) => {
  // An explicit file list on the command line takes precedence unchanged.
  if (testFiles.length !== 0) {
    return testFiles;
  }

  const path = require('path');
  const { promises: fs } = require('fs');
  const { promisify } = require('util');
  const glob = promisify(require('glob'));

  // Proxies are considered present iff the canonical Proxy.sol source exists.
  const hasProxies = await fs
    .access(path.join(config.paths.sources, 'proxy/Proxy.sol'))
    .then(() => true)
    .catch(() => false);

  // Test files that only make sense when the proxy contracts exist.
  const proxyTests = [
    'proxy/beacon/BeaconProxy.test.js',
    'proxy/beacon/UpgradeableBeacon.test.js',
    'proxy/ERC1967/ERC1967Proxy.test.js',
    'proxy/transparent/ProxyAdmin.test.js',
    'proxy/transparent/TransparentUpgradeableProxy.test.js',
    'proxy/utils/UUPSUpgradeable.test.js',
  ];

  return await glob(path.join(config.paths.tests, '**/*.js'), {
    ignore: hasProxies ? [] : proxyTests.map(p => path.join(config.paths.tests, p)),
  });
});

14
package-lock.json generated
View File

@ -31,7 +31,7 @@
"glob": "^8.0.3",
"graphlib": "^2.1.8",
"hardhat": "^2.9.1",
"hardhat-exposed": "^0.3.2",
"hardhat-exposed": "^0.3.3",
"hardhat-gas-reporter": "^1.0.4",
"hardhat-ignore-warnings": "^0.2.0",
"keccak256": "^1.0.2",
@ -8269,9 +8269,9 @@
}
},
"node_modules/hardhat-exposed": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.2.tgz",
"integrity": "sha512-qhi60I2bfjoPZwKgrY7BIpuUiBE7aC/bHN2MzHxPcZdxaeFnjKJ50n59LE7yK3GK2qYzE8DMjzqfnH6SlKPUjw==",
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.3.tgz",
"integrity": "sha512-AFBM3IUlSU9wkt3886kYbCSzZteB4OQt0ciehPUrCgY/Tazn6g2cxsmoIZT8O8va1R2PtXd9PRC4KZ6WiSVXOw==",
"dev": true,
"dependencies": {
"micromatch": "^4.0.4",
@ -22857,9 +22857,9 @@
}
},
"hardhat-exposed": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.2.tgz",
"integrity": "sha512-qhi60I2bfjoPZwKgrY7BIpuUiBE7aC/bHN2MzHxPcZdxaeFnjKJ50n59LE7yK3GK2qYzE8DMjzqfnH6SlKPUjw==",
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.3.tgz",
"integrity": "sha512-AFBM3IUlSU9wkt3886kYbCSzZteB4OQt0ciehPUrCgY/Tazn6g2cxsmoIZT8O8va1R2PtXd9PRC4KZ6WiSVXOw==",
"dev": true,
"requires": {
"micromatch": "^4.0.4",

View File

@ -72,7 +72,7 @@
"glob": "^8.0.3",
"graphlib": "^2.1.8",
"hardhat": "^2.9.1",
"hardhat-exposed": "^0.3.2",
"hardhat-exposed": "^0.3.3",
"hardhat-gas-reporter": "^1.0.4",
"hardhat-ignore-warnings": "^0.2.0",
"keccak256": "^1.0.2",

View File

@ -0,0 +1,21 @@
The upgradeable variant of OpenZeppelin Contracts is automatically generated from the original Solidity code. We call this process "transpilation" and it is implemented by our [Upgradeability Transpiler](https://github.com/OpenZeppelin/openzeppelin-transpiler/).
When the `master` branch or `release-v*` branches are updated, the code is transpiled and pushed to [OpenZeppelin/openzeppelin-contracts-upgradeable](https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable) by the `upgradeable.yml` workflow.
## `transpile.sh`
Applies patches and invokes the transpiler with the command line flags we need for our requirements (for example, excluding certain files).
## `transpile-onto.sh`
```
bash scripts/upgradeable/transpile-onto.sh <target> [<base>]
```
Transpiles the contents of the current git branch and commits the result as a new commit on branch `<target>`. If branch `<target>` doesn't exist, it will copy the commit history of `<base>` (this is used in GitHub Actions, but is usually not necessary locally).
## `patch-apply.sh` & `patch-save.sh`
Some of the upgradeable contract variants require ad-hoc changes that are not implemented by the transpiler. These changes are implemented by patches stored in `upgradeable.patch` in this directory. `patch-apply.sh` applies these patches.
If the patches fail to apply due to changes in the repo, the conflicts have to be resolved manually. Once fixed, `patch-save.sh` will take the changes staged in Git and update `upgradeable.patch` to match.

View File

@ -0,0 +1,19 @@
#!/usr/bin/env bash

# Applies upgradeable.patch on top of a clean checkout, using a 3-way merge so
# that any conflicts are left in the working tree for manual resolution.

set -euo pipefail

script_dir="$(dirname -- "${BASH_SOURCE[0]}")"
patch_file="$script_dir/upgradeable.patch"

die() {
  echo "Error: $*" >&2
  exit 1
}

# Refuse to run unless both the working tree and the index match HEAD
# (changes to the patch file itself are tolerated via the ":!" pathspec).
git diff-files --quiet ":!$patch_file" && git diff-index --quiet HEAD ":!$patch_file" \
  || die "Repository must have no staged or unstaged changes"

# -3 requests a 3-way merge; on failure, conflict markers are left behind.
git apply -3 "$patch_file" \
  || die "Fix conflicts and run $script_dir/patch-save.sh"

View File

@ -0,0 +1,18 @@
#!/usr/bin/env bash

# Regenerates upgradeable.patch from the changes currently staged in Git,
# then reverts those changes from the index and working tree.

set -euo pipefail

script_dir="$(dirname -- "${BASH_SOURCE[0]}")"
patch_file="$script_dir/upgradeable.patch"

die() {
  echo "Error: $*" >&2
  exit 1
}

# Only staged changes go into the patch; unstaged edits would be lost silently,
# so bail out if any exist (the patch file itself is exempt).
git diff-files --quiet ":!$patch_file" \
  || die "Unstaged changes. Stage to include in patch or temporarily stash."

# Write the staged diff against HEAD into the patch file...
git diff-index --cached --patch --output="$patch_file" HEAD

# ...and drop the now-saved changes from both the index and the working tree.
git restore --staged --worktree ":!$patch_file"

View File

@ -0,0 +1,44 @@
#!/usr/bin/env bash

# Transpiles the contents of the current branch to their upgradeable variant
# and commits the result onto branch <target>, creating <target> from [<base>]
# if it does not exist yet. The current branch is restored at the end.
#
# usage: bash scripts/upgradeable/transpile-onto.sh <target> [<base>]

set -euo pipefail

if [ $# -lt 1 ]; then
  echo "usage: bash $0 <target> [<base>]" >&2
  exit 1
fi

# Echo commands from here on to make CI logs easier to follow.
set -x

target="$1"
base="${2-}"  # optional; empty string when not provided

# Transpile the working tree in place (modifies contracts/).
bash scripts/upgradeable/transpile.sh

# Remember where we started so we can label the commit and return afterwards.
commit="$(git rev-parse --short HEAD)"
branch="$(git rev-parse --abbrev-ref HEAD)"

git add contracts

# detach from the current branch to avoid making changes to it
git checkout --quiet --detach

# switch to the target branch, creating it if necessary
if git rev-parse -q --verify "$target"; then
  # if the branch exists, make it the current HEAD without checking out its contents
  git reset --soft "$target"
  git checkout "$target"
else
  # if the branch doesn't exist, create it as an orphan and check it out
  git checkout --orphan "$target"
  if [ -n "$base" ] && git rev-parse -q --verify "$base"; then
    # if base was specified and it exists, set it as the branch history
    git reset --soft "$base"
  fi
fi

# commit if there are changes to commit
if ! git diff --quiet --cached; then
  git commit -m "Transpile $commit"
fi

# Return to the branch we started from.
git checkout "$branch"

View File

@ -0,0 +1,35 @@
#!/usr/bin/env bash

# Transpiles the compiled contracts into their upgradeable variant:
# applies the stored patches, compiles, then runs the upgrade-safe transpiler
# against the resulting build info.

set -euo pipefail -x

DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"

# Apply ad-hoc changes that the transpiler does not implement (see README.md).
bash "$DIRNAME/patch-apply.sh"

# Fresh compilation so exactly one relevant build info file is produced.
npm run clean
npm run compile

# Select build info files whose sources do NOT include a mock marked
# "unreachable"; word-splitting of the jq output into an array is intentional.
build_info=($(jq -r '.input.sources | keys | if any(test("^contracts/mocks/.*\\bunreachable\\b")) then empty else input_filename end' artifacts/build-info/*))
build_info_num=${#build_info[@]}

# The transpiler needs a single unambiguous build info file.
if [ $build_info_num -ne 1 ]; then
  echo "found $build_info_num relevant build info files but expected just 1"
  exit 1
fi

# -D: delete original and excluded files
# -b: use this build info file
# -i: use included Initializable
# -x: exclude proxy-related contracts with a few exceptions
# -p: emit public initializer
npx @openzeppelin/upgrade-safe-transpiler@latest -D \
  -b "$build_info" \
  -i contracts/proxy/utils/Initializable.sol \
  -x 'contracts-exposed/**/*' \
  -x 'contracts/proxy/**/*' \
  -x '!contracts/proxy/Clones.sol' \
  -x '!contracts/proxy/ERC1967/ERC1967Storage.sol' \
  -x '!contracts/proxy/ERC1967/ERC1967Upgrade.sol' \
  -x '!contracts/proxy/utils/UUPSUpgradeable.sol' \
  -x '!contracts/proxy/beacon/IBeacon.sol' \
  -p 'contracts/**/presets/**/*'

View File

@ -0,0 +1,481 @@
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index 2797a088..00000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,21 +0,0 @@
----
-name: Bug report
-about: Report a bug in OpenZeppelin Contracts
-
----
-
-<!-- Briefly describe the issue you're experiencing. Tell us what you were trying to do and what happened instead. -->
-
-<!-- Remember, this is not a place to ask for help debugging code. For that, we welcome you in the OpenZeppelin Community Forum: https://forum.openzeppelin.com/. -->
-
-**💻 Environment**
-
-<!-- Tell us what version of OpenZeppelin Contracts you're using, and how you're using it: Truffle, Remix, etc. -->
-
-**📝 Details**
-
-<!-- Describe the problem you have been experiencing in more detail. Include as much information as you think is relevant. Keep in mind that transactions can fail for many reasons; context is key here. -->
-
-**🔢 Code to reproduce bug**
-
-<!-- We will be able to better help if you provide a minimal example that triggers the bug. -->
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 4018cef2..d343a53d 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,4 +1,8 @@
+blank_issues_enabled: false
contact_links:
+ - name: Bug Reports & Feature Requests
+ url: https://github.com/OpenZeppelin/openzeppelin-contracts/issues/new/choose
+ about: Visit the OpenZeppelin Contracts repository
- name: Questions & Support Requests
url: https://forum.openzeppelin.com/c/support/contracts/18
about: Ask in the OpenZeppelin Forum
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index ff596b0c..00000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,14 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for OpenZeppelin Contracts
-
----
-
-**🧐 Motivation**
-<!-- Is your feature request related to a specific problem? Is it just a crazy idea? Tell us about it! -->
-
-**📝 Details**
-<!-- Please describe your feature request in detail. -->
-
-<!-- Make sure that you have reviewed the OpenZeppelin Contracts Contributor Guidelines. -->
-<!-- https://github.com/OpenZeppelin/openzeppelin-contracts/blob/master/CONTRIBUTING.md -->
diff --git a/README.md b/README.md
index 9fc95518..53130e3c 100644
--- a/README.md
+++ b/README.md
@@ -16,17 +16,20 @@
:building_construction: **Want to scale your decentralized application?** Check out [OpenZeppelin Defender](https://openzeppelin.com/defender) — a secure platform for automating and monitoring your operations.
+> **Note**
+> You are looking at the upgradeable variant of OpenZeppelin Contracts. Be sure to review the documentation on [Using OpenZeppelin Contracts with Upgrades](https://docs.openzeppelin.com/contracts/4.x/upgradeable).
+
## Overview
### Installation
```
-$ npm install @openzeppelin/contracts
+$ npm install @openzeppelin/contracts-upgradeable
```
OpenZeppelin Contracts features a [stable API](https://docs.openzeppelin.com/contracts/releases-stability#api-stability), which means that your contracts won't break unexpectedly when upgrading to a newer minor version.
-An alternative to npm is to use the GitHub repository (`openzeppelin/openzeppelin-contracts`) to retrieve the contracts. When doing this, make sure to specify the tag for a release such as `v4.5.0`, instead of using the `master` branch.
+An alternative to npm is to use the GitHub repository (`openzeppelin/openzeppelin-contracts-upgradeable`) to retrieve the contracts. When doing this, make sure to specify the tag for a release such as `v4.5.0`, instead of using the `master` branch.
### Usage
@@ -35,10 +38,11 @@ Once installed, you can use the contracts in the library by importing them:
```solidity
pragma solidity ^0.8.0;
-import "@openzeppelin/contracts/token/ERC721/ERC721.sol";
+import "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol";
-contract MyCollectible is ERC721 {
- constructor() ERC721("MyCollectible", "MCO") {
+contract MyCollectible is ERC721Upgradeable {
+ function initialize() initializer public {
+ __ERC721_init("MyCollectible", "MCO");
}
}
```
diff --git a/contracts/finance/VestingWallet.sol b/contracts/finance/VestingWallet.sol
index fe67eb54..d26ea4e1 100644
--- a/contracts/finance/VestingWallet.sol
+++ b/contracts/finance/VestingWallet.sol
@@ -15,6 +15,8 @@ import "../utils/Context.sol";
* Any token transferred to this contract will follow the vesting schedule as if they were locked from the beginning.
* Consequently, if the vesting has already started, any amount of tokens sent to this contract will (at least partly)
* be immediately releasable.
+ *
+ * @custom:storage-size 52
*/
contract VestingWallet is Context {
event EtherReleased(uint256 amount);
diff --git a/contracts/governance/TimelockControllerWith46Migration.sol b/contracts/governance/TimelockControllerWith46Migration.sol
new file mode 100644
index 00000000..3315e7bd
--- /dev/null
+++ b/contracts/governance/TimelockControllerWith46Migration.sol
@@ -0,0 +1,39 @@
+// SPDX-License-Identifier: MIT
+// OpenZeppelin Contracts v4.6.0 (governance/TimelockControllerWith46Migration.sol)
+
+pragma solidity ^0.8.0;
+
+import "./TimelockController.sol";
+
+/**
+ * @dev Extension of the TimelockController that includes an additional
+ * function to migrate from OpenZeppelin Upgradeable Contracts <4.6 to >=4.6.
+ *
+ * This migration is necessary to setup administration rights over the new
+ * `CANCELLER_ROLE`.
+ *
+ * The migration is trustless and can be performed by anyone.
+ *
+ * _Available since v4.6._
+ */
+contract TimelockControllerWith46Migration is TimelockController {
+ constructor(
+ uint256 minDelay,
+ address[] memory proposers,
+ address[] memory executors,
+ address admin
+ ) TimelockController(minDelay, proposers, executors, admin) {}
+
+ /**
+ * @dev Migration function. Running it is necessary for upgradeable
+ * instances that were initially setup with code <4.6 and that upgraded
+ * to >=4.6.
+ */
+ function migrateTo46() public virtual {
+ require(
+ getRoleAdmin(PROPOSER_ROLE) == TIMELOCK_ADMIN_ROLE && getRoleAdmin(CANCELLER_ROLE) == DEFAULT_ADMIN_ROLE,
+ "TimelockController: already migrated"
+ );
+ _setRoleAdmin(CANCELLER_ROLE, TIMELOCK_ADMIN_ROLE);
+ }
+}
diff --git a/contracts/governance/extensions/GovernorVotes.sol b/contracts/governance/extensions/GovernorVotes.sol
index 64431711..885f0e42 100644
--- a/contracts/governance/extensions/GovernorVotes.sol
+++ b/contracts/governance/extensions/GovernorVotes.sol
@@ -10,6 +10,8 @@ import "../../interfaces/IERC5805.sol";
* @dev Extension of {Governor} for voting weight extraction from an {ERC20Votes} token, or since v4.5 an {ERC721Votes} token.
*
* _Available since v4.3._
+ *
+ * @custom:storage-size 51
*/
abstract contract GovernorVotes is Governor {
IERC5805 public immutable token;
diff --git a/contracts/governance/extensions/GovernorVotesComp.sol b/contracts/governance/extensions/GovernorVotesComp.sol
index 17250ad7..1d26b72e 100644
--- a/contracts/governance/extensions/GovernorVotesComp.sol
+++ b/contracts/governance/extensions/GovernorVotesComp.sol
@@ -10,6 +10,8 @@ import "../../token/ERC20/extensions/ERC20VotesComp.sol";
* @dev Extension of {Governor} for voting weight extraction from a Comp token.
*
* _Available since v4.3._
+ *
+ * @custom:storage-size 51
*/
abstract contract GovernorVotesComp is Governor {
ERC20VotesComp public immutable token;
diff --git a/contracts/package.json b/contracts/package.json
index 55e70b17..ceefb984 100644
--- a/contracts/package.json
+++ b/contracts/package.json
@@ -1,5 +1,5 @@
{
- "name": "@openzeppelin/contracts",
+ "name": "@openzeppelin/contracts-upgradeable",
"description": "Secure Smart Contract library for Solidity",
"version": "4.8.2",
"files": [
@@ -13,7 +13,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/OpenZeppelin/openzeppelin-contracts.git"
+ "url": "https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git"
},
"keywords": [
"solidity",
diff --git a/contracts/security/PullPayment.sol b/contracts/security/PullPayment.sol
index 65b4980f..f336592e 100644
--- a/contracts/security/PullPayment.sol
+++ b/contracts/security/PullPayment.sol
@@ -22,6 +22,8 @@ import "../utils/escrow/Escrow.sol";
* To use, derive from the `PullPayment` contract, and use {_asyncTransfer}
* instead of Solidity's `transfer` function. Payees can query their due
* payments with {payments}, and retrieve them with {withdrawPayments}.
+ *
+ * @custom:storage-size 51
*/
abstract contract PullPayment {
Escrow private immutable _escrow;
diff --git a/contracts/token/ERC20/extensions/ERC20Capped.sol b/contracts/token/ERC20/extensions/ERC20Capped.sol
index 16f830d1..9ef98148 100644
--- a/contracts/token/ERC20/extensions/ERC20Capped.sol
+++ b/contracts/token/ERC20/extensions/ERC20Capped.sol
@@ -7,6 +7,8 @@ import "../ERC20.sol";
/**
* @dev Extension of {ERC20} that adds a cap to the supply of tokens.
+ *
+ * @custom:storage-size 51
*/
abstract contract ERC20Capped is ERC20 {
uint256 private immutable _cap;
diff --git a/contracts/token/ERC20/extensions/ERC20Permit.sol b/contracts/token/ERC20/extensions/ERC20Permit.sol
index a357199b..9dc8e894 100644
--- a/contracts/token/ERC20/extensions/ERC20Permit.sol
+++ b/contracts/token/ERC20/extensions/ERC20Permit.sol
@@ -18,6 +18,8 @@ import "../../../utils/Counters.sol";
* need to send a transaction, and thus is not required to hold Ether at all.
*
* _Available since v3.4._
+ *
+ * @custom:storage-size 51
*/
abstract contract ERC20Permit is ERC20, IERC20Permit, EIP712 {
using Counters for Counters.Counter;
diff --git a/contracts/token/ERC20/extensions/ERC20Wrapper.sol b/contracts/token/ERC20/extensions/ERC20Wrapper.sol
index bfe782e4..7264fe32 100644
--- a/contracts/token/ERC20/extensions/ERC20Wrapper.sol
+++ b/contracts/token/ERC20/extensions/ERC20Wrapper.sol
@@ -14,6 +14,8 @@ import "../utils/SafeERC20.sol";
* wrapping of an existing "basic" ERC20 into a governance token.
*
* _Available since v4.2._
+ *
+ * @custom:storage-size 51
*/
abstract contract ERC20Wrapper is ERC20 {
IERC20 private immutable _underlying;
diff --git a/contracts/token/ERC20/utils/TokenTimelock.sol b/contracts/token/ERC20/utils/TokenTimelock.sol
index ed855b7b..3d30f59d 100644
--- a/contracts/token/ERC20/utils/TokenTimelock.sol
+++ b/contracts/token/ERC20/utils/TokenTimelock.sol
@@ -11,6 +11,8 @@ import "./SafeERC20.sol";
*
* Useful for simple vesting schedules like "advisors get all of their tokens
* after 1 year".
+ *
+ * @custom:storage-size 53
*/
contract TokenTimelock {
using SafeERC20 for IERC20;
diff --git a/contracts/utils/cryptography/EIP712.sol b/contracts/utils/cryptography/EIP712.sol
index 6a4e1cad..55d8eced 100644
--- a/contracts/utils/cryptography/EIP712.sol
+++ b/contracts/utils/cryptography/EIP712.sol
@@ -4,7 +4,6 @@
pragma solidity ^0.8.8;
import "./ECDSA.sol";
-import "../ShortStrings.sol";
import "../../interfaces/IERC5267.sol";
/**
@@ -30,27 +29,19 @@ import "../../interfaces/IERC5267.sol";
*
* _Available since v3.4._
*
- * @custom:oz-upgrades-unsafe-allow state-variable-immutable state-variable-assignment
+ * @custom:storage-size 52
*/
abstract contract EIP712 is IERC5267 {
- using ShortStrings for *;
-
bytes32 private constant _TYPE_HASH =
keccak256("EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)");
- // Cache the domain separator as an immutable value, but also store the chain id that it corresponds to, in order to
- // invalidate the cached domain separator if the chain id changes.
- bytes32 private immutable _cachedDomainSeparator;
- uint256 private immutable _cachedChainId;
- address private immutable _cachedThis;
-
+ /// @custom:oz-renamed-from _HASHED_NAME
bytes32 private immutable _hashedName;
+ /// @custom:oz-renamed-from _HASHED_VERSION
bytes32 private immutable _hashedVersion;
- ShortString private immutable _name;
- ShortString private immutable _version;
- string private _nameFallback;
- string private _versionFallback;
+ string private _name;
+ string private _version;
/**
* @dev Initializes the domain separator and parameter caches.
@@ -65,29 +56,23 @@ abstract contract EIP712 is IERC5267 {
* contract upgrade].
*/
constructor(string memory name, string memory version) {
- _name = name.toShortStringWithFallback(_nameFallback);
- _version = version.toShortStringWithFallback(_versionFallback);
- _hashedName = keccak256(bytes(name));
- _hashedVersion = keccak256(bytes(version));
-
- _cachedChainId = block.chainid;
- _cachedDomainSeparator = _buildDomainSeparator();
- _cachedThis = address(this);
+ _name = name;
+ _version = version;
+
+ // Reset prior values in storage if upgrading
+ _hashedName = 0;
+ _hashedVersion = 0;
}
/**
* @dev Returns the domain separator for the current chain.
*/
function _domainSeparatorV4() internal view returns (bytes32) {
- if (address(this) == _cachedThis && block.chainid == _cachedChainId) {
- return _cachedDomainSeparator;
- } else {
- return _buildDomainSeparator();
- }
+ return _buildDomainSeparator();
}
function _buildDomainSeparator() private view returns (bytes32) {
- return keccak256(abi.encode(_TYPE_HASH, _hashedName, _hashedVersion, block.chainid, address(this)));
+ return keccak256(abi.encode(_TYPE_HASH, _EIP712NameHash(), _EIP712VersionHash(), block.chainid, address(this)));
}
/**
@@ -129,14 +114,80 @@ abstract contract EIP712 is IERC5267 {
uint256[] memory extensions
)
{
+ // If the hashed name and version in storage are non-zero, the contract hasn't been properly initialized
+ // and the EIP712 domain is not reliable, as it will be missing name and version.
+ require(_hashedName == 0 && _hashedVersion == 0, "EIP712: Uninitialized");
+
return (
hex"0f", // 01111
- _name.toStringWithFallback(_nameFallback),
- _version.toStringWithFallback(_versionFallback),
+ _EIP712Name(),
+ _EIP712Version(),
block.chainid,
address(this),
bytes32(0),
new uint256[](0)
);
}
+
+ /**
+ * @dev The name parameter for the EIP712 domain.
+ *
+ * NOTE: This function reads from storage by default, but can be redefined to return a constant value if gas costs
+ * are a concern.
+ */
+ function _EIP712Name() internal virtual view returns (string memory) {
+ return _name;
+ }
+
+ /**
+ * @dev The version parameter for the EIP712 domain.
+ *
+ * NOTE: This function reads from storage by default, but can be redefined to return a constant value if gas costs
+ * are a concern.
+ */
+ function _EIP712Version() internal virtual view returns (string memory) {
+ return _version;
+ }
+
+ /**
+ * @dev The hash of the name parameter for the EIP712 domain.
+ *
+ * NOTE: In previous versions this function was virtual. In this version you should override `_EIP712Name` instead.
+ */
+ function _EIP712NameHash() internal view returns (bytes32) {
+ string memory name = _EIP712Name();
+ if (bytes(name).length > 0) {
+ return keccak256(bytes(name));
+ } else {
+ // If the name is empty, the contract may have been upgraded without initializing the new storage.
+ // We return the name hash in storage if non-zero, otherwise we assume the name is empty by design.
+ bytes32 hashedName = _hashedName;
+ if (hashedName != 0) {
+ return hashedName;
+ } else {
+ return keccak256("");
+ }
+ }
+ }
+
+ /**
+ * @dev The hash of the version parameter for the EIP712 domain.
+ *
+ * NOTE: In previous versions this function was virtual. In this version you should override `_EIP712Version` instead.
+ */
+ function _EIP712VersionHash() internal view returns (bytes32) {
+ string memory version = _EIP712Version();
+ if (bytes(version).length > 0) {
+ return keccak256(bytes(version));
+ } else {
+ // If the version is empty, the contract may have been upgraded without initializing the new storage.
+ // We return the version hash in storage if non-zero, otherwise we assume the version is empty by design.
+ bytes32 hashedVersion = _hashedVersion;
+ if (hashedVersion != 0) {
+ return hashedVersion;
+ } else {
+ return keccak256("");
+ }
+ }
+ }
}
diff --git a/package.json b/package.json
index 8458dd61..b4672240 100644
--- a/package.json
+++ b/package.json
@@ -36,7 +36,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/OpenZeppelin/openzeppelin-contracts.git"
+ "url": "https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git"
},
"keywords": [
"solidity",
diff --git a/test/utils/cryptography/EIP712.test.js b/test/utils/cryptography/EIP712.test.js
index 54a4e772..ba4602ed 100644
--- a/test/utils/cryptography/EIP712.test.js
+++ b/test/utils/cryptography/EIP712.test.js
@@ -47,26 +47,6 @@ contract('EIP712', function (accounts) {
const rebuildDomain = await getDomain(this.eip712);
expect(mapValues(rebuildDomain, String)).to.be.deep.equal(mapValues(this.domain, String));
});
-
- if (shortOrLong === 'short') {
- // Long strings are in storage, and the proxy will not be properly initialized unless
- // the upgradeable contract variant is used and the initializer is invoked.
-
- it('adjusts when behind proxy', async function () {
- const factory = await Clones.new();
- const cloneReceipt = await factory.$clone(this.eip712.address);
- const cloneAddress = cloneReceipt.logs.find(({ event }) => event === 'return$clone').args.instance;
- const clone = new EIP712Verifier(cloneAddress);
-
- const cloneDomain = { ...this.domain, verifyingContract: clone.address };
-
- const reportedDomain = await getDomain(clone);
- expect(mapValues(reportedDomain, String)).to.be.deep.equal(mapValues(cloneDomain, String));
-
- const expectedSeparator = await domainSeparator(cloneDomain);
- expect(await clone.$_domainSeparatorV4()).to.equal(expectedSeparator);
- });
- }
});
it('hash digest', async function () {

View File

@ -13,6 +13,7 @@ describe('migrate-imports.js', function () {
try {
await fs.access(path.join('contracts', p), F_OK);
} catch (e) {
if (p.startsWith('proxy/')) continue; // excluded from transpilation of upgradeable contracts
await fs.access(path.join('contracts', getUpgradeablePath(p)), F_OK);
}
}

View File

@ -6,28 +6,48 @@ const { batchInBlock } = require('../helpers/txpool');
const $Checkpoints = artifacts.require('$Checkpoints');
// The library name may be 'Checkpoints' or 'CheckpointsUpgradeable'
const libraryName = $Checkpoints._json.contractName.replace(/^\$/, '');
const traceLengths = [160, 224];
const first = array => (array.length ? array[0] : undefined);
const last = array => (array.length ? array[array.length - 1] : undefined);
contract('Checkpoints', function () {
beforeEach(async function () {
this.mock = await $Checkpoints.new();
this.methods = { trace: {} };
this.methods.history = {
latest: (...args) => this.mock.methods[`$latest_${libraryName}_History(uint256)`](0, ...args),
latestCheckpoint: (...args) => this.mock.methods[`$latestCheckpoint_${libraryName}_History(uint256)`](0, ...args),
length: (...args) => this.mock.methods[`$length_${libraryName}_History(uint256)`](0, ...args),
push: (...args) => this.mock.methods['$push(uint256,uint256)'](0, ...args),
getAtBlock: (...args) => this.mock.$getAtBlock(0, ...args),
getAtRecentBlock: (...args) => this.mock.$getAtProbablyRecentBlock(0, ...args),
};
for (const length of traceLengths) {
this.methods.trace[length] = {
latest: (...args) => this.mock.methods[`$latest_${libraryName}_Trace${length}(uint256)`](0, ...args),
latestCheckpoint: (...args) =>
this.mock.methods[`$latestCheckpoint_${libraryName}_Trace${length}(uint256)`](0, ...args),
length: (...args) => this.mock.methods[`$length_${libraryName}_Trace${length}(uint256)`](0, ...args),
push: (...args) => this.mock.methods[`$push(uint256,uint${256 - length},uint${length})`](0, ...args),
lowerLookup: (...args) => this.mock.methods[`$lowerLookup(uint256,uint${256 - length})`](0, ...args),
upperLookup: (...args) => this.mock.methods[`$upperLookup(uint256,uint${256 - length})`](0, ...args),
upperLookupRecent: (...args) =>
this.mock.methods[`$upperLookupRecent(uint256,uint${256 - length})`](0, ...args),
};
}
});
describe('History checkpoints', function () {
const latest = (self, ...args) => self.methods['$latest_Checkpoints_History(uint256)'](0, ...args);
const latestCheckpoint = (self, ...args) =>
self.methods['$latestCheckpoint_Checkpoints_History(uint256)'](0, ...args);
const push = (self, ...args) => self.methods['$push(uint256,uint256)'](0, ...args);
const getAtBlock = (self, ...args) => self.methods['$getAtBlock(uint256,uint256)'](0, ...args);
const getAtRecentBlock = (self, ...args) => self.methods['$getAtProbablyRecentBlock(uint256,uint256)'](0, ...args);
const getLength = (self, ...args) => self.methods['$length_Checkpoints_History(uint256)'](0, ...args);
describe('without checkpoints', function () {
it('returns zero as latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal('0');
expect(await this.methods.history.latest()).to.be.bignumber.equal('0');
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.history.latestCheckpoint();
expect(ckpt[0]).to.be.equal(false);
expect(ckpt[1]).to.be.bignumber.equal('0');
expect(ckpt[2]).to.be.bignumber.equal('0');
@ -35,49 +55,63 @@ contract('Checkpoints', function () {
it('returns zero as past value', async function () {
await time.advanceBlock();
expect(await getAtBlock(this.mock, (await web3.eth.getBlockNumber()) - 1)).to.be.bignumber.equal('0');
expect(await getAtRecentBlock(this.mock, (await web3.eth.getBlockNumber()) - 1)).to.be.bignumber.equal('0');
expect(await this.methods.history.getAtBlock((await web3.eth.getBlockNumber()) - 1)).to.be.bignumber.equal('0');
expect(
await this.methods.history.getAtRecentBlock((await web3.eth.getBlockNumber()) - 1),
).to.be.bignumber.equal('0');
});
});
describe('with checkpoints', function () {
beforeEach('pushing checkpoints', async function () {
this.tx1 = await push(this.mock, 1);
this.tx2 = await push(this.mock, 2);
this.tx1 = await this.methods.history.push(1);
this.tx2 = await this.methods.history.push(2);
await time.advanceBlock();
this.tx3 = await push(this.mock, 3);
this.tx3 = await this.methods.history.push(3);
await time.advanceBlock();
await time.advanceBlock();
});
it('returns latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal('3');
expect(await this.methods.history.latest()).to.be.bignumber.equal('3');
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.history.latestCheckpoint();
expect(ckpt[0]).to.be.equal(true);
expect(ckpt[1]).to.be.bignumber.equal(web3.utils.toBN(this.tx3.receipt.blockNumber));
expect(ckpt[2]).to.be.bignumber.equal(web3.utils.toBN('3'));
});
for (const getAtBlockVariant of [getAtBlock, getAtRecentBlock]) {
for (const getAtBlockVariant of ['getAtBlock', 'getAtRecentBlock']) {
describe(`lookup: ${getAtBlockVariant}`, function () {
it('returns past values', async function () {
expect(await getAtBlockVariant(this.mock, this.tx1.receipt.blockNumber - 1)).to.be.bignumber.equal('0');
expect(await getAtBlockVariant(this.mock, this.tx1.receipt.blockNumber)).to.be.bignumber.equal('1');
expect(await getAtBlockVariant(this.mock, this.tx2.receipt.blockNumber)).to.be.bignumber.equal('2');
expect(
await this.methods.history[getAtBlockVariant](this.tx1.receipt.blockNumber - 1),
).to.be.bignumber.equal('0');
expect(await this.methods.history[getAtBlockVariant](this.tx1.receipt.blockNumber)).to.be.bignumber.equal(
'1',
);
expect(await this.methods.history[getAtBlockVariant](this.tx2.receipt.blockNumber)).to.be.bignumber.equal(
'2',
);
// Block with no new checkpoints
expect(await getAtBlockVariant(this.mock, this.tx2.receipt.blockNumber + 1)).to.be.bignumber.equal('2');
expect(await getAtBlockVariant(this.mock, this.tx3.receipt.blockNumber)).to.be.bignumber.equal('3');
expect(await getAtBlockVariant(this.mock, this.tx3.receipt.blockNumber + 1)).to.be.bignumber.equal('3');
expect(
await this.methods.history[getAtBlockVariant](this.tx2.receipt.blockNumber + 1),
).to.be.bignumber.equal('2');
expect(await this.methods.history[getAtBlockVariant](this.tx3.receipt.blockNumber)).to.be.bignumber.equal(
'3',
);
expect(
await this.methods.history[getAtBlockVariant](this.tx3.receipt.blockNumber + 1),
).to.be.bignumber.equal('3');
});
it('reverts if block number >= current block', async function () {
await expectRevert(
getAtBlockVariant(this.mock, await web3.eth.getBlockNumber()),
this.methods.history[getAtBlockVariant](await web3.eth.getBlockNumber()),
'Checkpoints: block not yet mined',
);
await expectRevert(
getAtBlockVariant(this.mock, (await web3.eth.getBlockNumber()) + 1),
this.methods.history[getAtBlockVariant]((await web3.eth.getBlockNumber()) + 1),
'Checkpoints: block not yet mined',
);
});
@ -85,58 +119,48 @@ contract('Checkpoints', function () {
}
it('multiple checkpoints in the same block', async function () {
const lengthBefore = await getLength(this.mock);
const lengthBefore = await this.methods.history.length();
await batchInBlock([
() => push(this.mock, 8, { gas: 100000 }),
() => push(this.mock, 9, { gas: 100000 }),
() => push(this.mock, 10, { gas: 100000 }),
() => this.methods.history.push(8, { gas: 100000 }),
() => this.methods.history.push(9, { gas: 100000 }),
() => this.methods.history.push(10, { gas: 100000 }),
]);
expect(await getLength(this.mock)).to.be.bignumber.equal(lengthBefore.addn(1));
expect(await latest(this.mock)).to.be.bignumber.equal('10');
expect(await this.methods.history.length()).to.be.bignumber.equal(lengthBefore.addn(1));
expect(await this.methods.history.latest()).to.be.bignumber.equal('10');
});
it('more than 5 checkpoints', async function () {
for (let i = 4; i <= 6; i++) {
await push(this.mock, i);
await this.methods.history.push(i);
}
expect(await getLength(this.mock)).to.be.bignumber.equal('6');
expect(await this.methods.history.length()).to.be.bignumber.equal('6');
const block = await web3.eth.getBlockNumber();
// recent
expect(await getAtRecentBlock(this.mock, block - 1)).to.be.bignumber.equal('5');
expect(await this.methods.history.getAtRecentBlock(block - 1)).to.be.bignumber.equal('5');
// non-recent
expect(await getAtRecentBlock(this.mock, block - 9)).to.be.bignumber.equal('0');
expect(await this.methods.history.getAtRecentBlock(block - 9)).to.be.bignumber.equal('0');
});
});
});
for (const length of [160, 224]) {
for (const length of traceLengths) {
describe(`Trace${length}`, function () {
const latest = (self, ...args) => self.methods[`$latest_Checkpoints_Trace${length}(uint256)`](0, ...args);
const latestCheckpoint = (self, ...args) =>
self.methods[`$latestCheckpoint_Checkpoints_Trace${length}(uint256)`](0, ...args);
const push = (self, ...args) => self.methods[`$push(uint256,uint${256 - length},uint${length})`](0, ...args);
const lowerLookup = (self, ...args) => self.methods[`$lowerLookup(uint256,uint${256 - length})`](0, ...args);
const upperLookup = (self, ...args) => self.methods[`$upperLookup(uint256,uint${256 - length})`](0, ...args);
const upperLookupRecent = (self, ...args) =>
self.methods[`$upperLookupRecent(uint256,uint${256 - length})`](0, ...args);
const getLength = (self, ...args) => self.methods[`$length_Checkpoints_Trace${length}(uint256)`](0, ...args);
describe('without checkpoints', function () {
it('returns zero as latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].latest()).to.be.bignumber.equal('0');
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.trace[length].latestCheckpoint();
expect(ckpt[0]).to.be.equal(false);
expect(ckpt[1]).to.be.bignumber.equal('0');
expect(ckpt[2]).to.be.bignumber.equal('0');
});
it('lookup returns 0', async function () {
expect(await lowerLookup(this.mock, 0)).to.be.bignumber.equal('0');
expect(await upperLookup(this.mock, 0)).to.be.bignumber.equal('0');
expect(await upperLookupRecent(this.mock, 0)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].lowerLookup(0)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].upperLookup(0)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].upperLookupRecent(0)).to.be.bignumber.equal('0');
});
});
@ -150,46 +174,49 @@ contract('Checkpoints', function () {
{ key: '11', value: '99' },
];
for (const { key, value } of this.checkpoints) {
await push(this.mock, key, value);
await this.methods.trace[length].push(key, value);
}
});
it('length', async function () {
expect(await getLength(this.mock)).to.be.bignumber.equal(this.checkpoints.length.toString());
expect(await this.methods.trace[length].length()).to.be.bignumber.equal(this.checkpoints.length.toString());
});
it('returns latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal(last(this.checkpoints).value);
expect(await this.methods.trace[length].latest()).to.be.bignumber.equal(last(this.checkpoints).value);
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.trace[length].latestCheckpoint();
expect(ckpt[0]).to.be.equal(true);
expect(ckpt[1]).to.be.bignumber.equal(last(this.checkpoints).key);
expect(ckpt[2]).to.be.bignumber.equal(last(this.checkpoints).value);
});
it('cannot push values in the past', async function () {
await expectRevert(push(this.mock, last(this.checkpoints).key - 1, '0'), 'Checkpoint: decreasing keys');
await expectRevert(
this.methods.trace[length].push(last(this.checkpoints).key - 1, '0'),
'Checkpoint: decreasing keys',
);
});
it('can update last value', async function () {
const newValue = '42';
// check length before the update
expect(await getLength(this.mock)).to.be.bignumber.equal(this.checkpoints.length.toString());
expect(await this.methods.trace[length].length()).to.be.bignumber.equal(this.checkpoints.length.toString());
// update last key
await push(this.mock, last(this.checkpoints).key, newValue);
expect(await latest(this.mock)).to.be.bignumber.equal(newValue);
await this.methods.trace[length].push(last(this.checkpoints).key, newValue);
expect(await this.methods.trace[length].latest()).to.be.bignumber.equal(newValue);
// check that length did not change
expect(await getLength(this.mock)).to.be.bignumber.equal(this.checkpoints.length.toString());
expect(await this.methods.trace[length].length()).to.be.bignumber.equal(this.checkpoints.length.toString());
});
it('lower lookup', async function () {
for (let i = 0; i < 14; ++i) {
const value = first(this.checkpoints.filter(x => i <= x.key))?.value || '0';
expect(await lowerLookup(this.mock, i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].lowerLookup(i)).to.be.bignumber.equal(value);
}
});
@ -197,8 +224,8 @@ contract('Checkpoints', function () {
for (let i = 0; i < 14; ++i) {
const value = last(this.checkpoints.filter(x => i >= x.key))?.value || '0';
expect(await upperLookup(this.mock, i)).to.be.bignumber.equal(value);
expect(await upperLookupRecent(this.mock, i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookup(i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookupRecent(i)).to.be.bignumber.equal(value);
}
});
@ -213,13 +240,13 @@ contract('Checkpoints', function () {
const allCheckpoints = [].concat(this.checkpoints, moreCheckpoints);
for (const { key, value } of moreCheckpoints) {
await push(this.mock, key, value);
await this.methods.trace[length].push(key, value);
}
for (let i = 0; i < 25; ++i) {
const value = last(allCheckpoints.filter(x => i >= x.key))?.value || '0';
expect(await upperLookup(this.mock, i)).to.be.bignumber.equal(value);
expect(await upperLookupRecent(this.mock, i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookup(i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookupRecent(i)).to.be.bignumber.equal(value);
}
});
});

View File

@ -60,11 +60,11 @@ contract('EIP712', function (accounts) {
const cloneDomain = { ...this.domain, verifyingContract: clone.address };
const expectedSeparator = await domainSeparator(cloneDomain);
expect(await clone.$_domainSeparatorV4()).to.equal(expectedSeparator);
const reportedDomain = await getDomain(clone);
expect(mapValues(reportedDomain, String)).to.be.deep.equal(mapValues(cloneDomain, String));
const expectedSeparator = await domainSeparator(cloneDomain);
expect(await clone.$_domainSeparatorV4()).to.equal(expectedSeparator);
});
}
});

View File

@ -14,6 +14,9 @@ const getMethods = ms => {
);
};
// Get the name of the library. In the transpiled code it will be EnumerableMapUpgradeable.
const library = EnumerableMap._json.contractName.replace(/^\$/, '');
contract('EnumerableMap', function (accounts) {
const [accountA, accountB, accountC] = accounts;
@ -41,14 +44,14 @@ contract('EnumerableMap', function (accounts) {
getWithMessage: '$get(uint256,address,string)',
tryGet: '$tryGet(uint256,address)',
remove: '$remove(uint256,address)',
length: '$length_EnumerableMap_AddressToUintMap(uint256)',
at: '$at_EnumerableMap_AddressToUintMap(uint256,uint256)',
length: `$length_${library}_AddressToUintMap(uint256)`,
at: `$at_${library}_AddressToUintMap(uint256,uint256)`,
contains: '$contains(uint256,address)',
keys: '$keys_EnumerableMap_AddressToUintMap(uint256)',
keys: `$keys_${library}_AddressToUintMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_AddressToUintMap_address_uint256',
removeReturn: 'return$remove_EnumerableMap_AddressToUintMap_address',
setReturn: `return$set_${library}_AddressToUintMap_address_uint256`,
removeReturn: `return$remove_${library}_AddressToUintMap_address`,
},
);
});
@ -61,18 +64,18 @@ contract('EnumerableMap', function (accounts) {
constants.ZERO_ADDRESS,
getMethods({
set: '$set(uint256,uint256,address)',
get: '$get_EnumerableMap_UintToAddressMap(uint256,uint256)',
getWithMessage: '$get_EnumerableMap_UintToAddressMap(uint256,uint256,string)',
tryGet: '$tryGet_EnumerableMap_UintToAddressMap(uint256,uint256)',
remove: '$remove_EnumerableMap_UintToAddressMap(uint256,uint256)',
length: '$length_EnumerableMap_UintToAddressMap(uint256)',
at: '$at_EnumerableMap_UintToAddressMap(uint256,uint256)',
contains: '$contains_EnumerableMap_UintToAddressMap(uint256,uint256)',
keys: '$keys_EnumerableMap_UintToAddressMap(uint256)',
get: `$get_${library}_UintToAddressMap(uint256,uint256)`,
getWithMessage: `$get_${library}_UintToAddressMap(uint256,uint256,string)`,
tryGet: `$tryGet_${library}_UintToAddressMap(uint256,uint256)`,
remove: `$remove_${library}_UintToAddressMap(uint256,uint256)`,
length: `$length_${library}_UintToAddressMap(uint256)`,
at: `$at_${library}_UintToAddressMap(uint256,uint256)`,
contains: `$contains_${library}_UintToAddressMap(uint256,uint256)`,
keys: `$keys_${library}_UintToAddressMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_UintToAddressMap_uint256_address',
removeReturn: 'return$remove_EnumerableMap_UintToAddressMap_uint256',
setReturn: `return$set_${library}_UintToAddressMap_uint256_address`,
removeReturn: `return$remove_${library}_UintToAddressMap_uint256`,
},
);
});
@ -85,18 +88,18 @@ contract('EnumerableMap', function (accounts) {
constants.ZERO_BYTES32,
getMethods({
set: '$set(uint256,bytes32,bytes32)',
get: '$get_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
getWithMessage: '$get_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32,string)',
tryGet: '$tryGet_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
remove: '$remove_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
length: '$length_EnumerableMap_Bytes32ToBytes32Map(uint256)',
at: '$at_EnumerableMap_Bytes32ToBytes32Map(uint256,uint256)',
contains: '$contains_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
keys: '$keys_EnumerableMap_Bytes32ToBytes32Map(uint256)',
get: `$get_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
getWithMessage: `$get_${library}_Bytes32ToBytes32Map(uint256,bytes32,string)`,
tryGet: `$tryGet_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
remove: `$remove_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
length: `$length_${library}_Bytes32ToBytes32Map(uint256)`,
at: `$at_${library}_Bytes32ToBytes32Map(uint256,uint256)`,
contains: `$contains_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
keys: `$keys_${library}_Bytes32ToBytes32Map(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_Bytes32ToBytes32Map_bytes32_bytes32',
removeReturn: 'return$remove_EnumerableMap_Bytes32ToBytes32Map_bytes32',
setReturn: `return$set_${library}_Bytes32ToBytes32Map_bytes32_bytes32`,
removeReturn: `return$remove_${library}_Bytes32ToBytes32Map_bytes32`,
},
);
});
@ -109,18 +112,18 @@ contract('EnumerableMap', function (accounts) {
new BN('0'),
getMethods({
set: '$set(uint256,uint256,uint256)',
get: '$get_EnumerableMap_UintToUintMap(uint256,uint256)',
getWithMessage: '$get_EnumerableMap_UintToUintMap(uint256,uint256,string)',
tryGet: '$tryGet_EnumerableMap_UintToUintMap(uint256,uint256)',
remove: '$remove_EnumerableMap_UintToUintMap(uint256,uint256)',
length: '$length_EnumerableMap_UintToUintMap(uint256)',
at: '$at_EnumerableMap_UintToUintMap(uint256,uint256)',
contains: '$contains_EnumerableMap_UintToUintMap(uint256,uint256)',
keys: '$keys_EnumerableMap_UintToUintMap(uint256)',
get: `$get_${library}_UintToUintMap(uint256,uint256)`,
getWithMessage: `$get_${library}_UintToUintMap(uint256,uint256,string)`,
tryGet: `$tryGet_${library}_UintToUintMap(uint256,uint256)`,
remove: `$remove_${library}_UintToUintMap(uint256,uint256)`,
length: `$length_${library}_UintToUintMap(uint256)`,
at: `$at_${library}_UintToUintMap(uint256,uint256)`,
contains: `$contains_${library}_UintToUintMap(uint256,uint256)`,
keys: `$keys_${library}_UintToUintMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_UintToUintMap_uint256_uint256',
removeReturn: 'return$remove_EnumerableMap_UintToUintMap_uint256',
setReturn: `return$set_${library}_UintToUintMap_uint256_uint256`,
removeReturn: `return$remove_${library}_UintToUintMap_uint256`,
},
);
});
@ -133,18 +136,18 @@ contract('EnumerableMap', function (accounts) {
new BN('0'),
getMethods({
set: '$set(uint256,bytes32,uint256)',
get: '$get_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
getWithMessage: '$get_EnumerableMap_Bytes32ToUintMap(uint256,bytes32,string)',
tryGet: '$tryGet_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
remove: '$remove_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
length: '$length_EnumerableMap_Bytes32ToUintMap(uint256)',
at: '$at_EnumerableMap_Bytes32ToUintMap(uint256,uint256)',
contains: '$contains_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
keys: '$keys_EnumerableMap_Bytes32ToUintMap(uint256)',
get: `$get_${library}_Bytes32ToUintMap(uint256,bytes32)`,
getWithMessage: `$get_${library}_Bytes32ToUintMap(uint256,bytes32,string)`,
tryGet: `$tryGet_${library}_Bytes32ToUintMap(uint256,bytes32)`,
remove: `$remove_${library}_Bytes32ToUintMap(uint256,bytes32)`,
length: `$length_${library}_Bytes32ToUintMap(uint256)`,
at: `$at_${library}_Bytes32ToUintMap(uint256,uint256)`,
contains: `$contains_${library}_Bytes32ToUintMap(uint256,bytes32)`,
keys: `$keys_${library}_Bytes32ToUintMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_Bytes32ToUintMap_bytes32_uint256',
removeReturn: 'return$remove_EnumerableMap_Bytes32ToUintMap_bytes32',
setReturn: `return$set_${library}_Bytes32ToUintMap_bytes32_uint256`,
removeReturn: `return$remove_${library}_Bytes32ToUintMap_bytes32`,
},
);
});

View File

@ -12,6 +12,9 @@ const getMethods = ms => {
);
};
// Get the name of the library. In the transpiled code it will be EnumerableSetUpgradeable.
const library = EnumerableSet._json.contractName.replace(/^\$/, '');
contract('EnumerableSet', function (accounts) {
beforeEach(async function () {
this.set = await EnumerableSet.new();
@ -25,13 +28,13 @@ contract('EnumerableSet', function (accounts) {
add: '$add(uint256,bytes32)',
remove: '$remove(uint256,bytes32)',
contains: '$contains(uint256,bytes32)',
length: '$length_EnumerableSet_Bytes32Set(uint256)',
at: '$at_EnumerableSet_Bytes32Set(uint256,uint256)',
values: '$values_EnumerableSet_Bytes32Set(uint256)',
length: `$length_${library}_Bytes32Set(uint256)`,
at: `$at_${library}_Bytes32Set(uint256,uint256)`,
values: `$values_${library}_Bytes32Set(uint256)`,
}),
{
addReturn: 'return$add_EnumerableSet_Bytes32Set_bytes32',
removeReturn: 'return$remove_EnumerableSet_Bytes32Set_bytes32',
addReturn: `return$add_${library}_Bytes32Set_bytes32`,
removeReturn: `return$remove_${library}_Bytes32Set_bytes32`,
},
);
});
@ -44,13 +47,13 @@ contract('EnumerableSet', function (accounts) {
add: '$add(uint256,address)',
remove: '$remove(uint256,address)',
contains: '$contains(uint256,address)',
length: '$length_EnumerableSet_AddressSet(uint256)',
at: '$at_EnumerableSet_AddressSet(uint256,uint256)',
values: '$values_EnumerableSet_AddressSet(uint256)',
length: `$length_${library}_AddressSet(uint256)`,
at: `$at_${library}_AddressSet(uint256,uint256)`,
values: `$values_${library}_AddressSet(uint256)`,
}),
{
addReturn: 'return$add_EnumerableSet_AddressSet_address',
removeReturn: 'return$remove_EnumerableSet_AddressSet_address',
addReturn: `return$add_${library}_AddressSet_address`,
removeReturn: `return$remove_${library}_AddressSet_address`,
},
);
});
@ -63,13 +66,13 @@ contract('EnumerableSet', function (accounts) {
add: '$add(uint256,uint256)',
remove: '$remove(uint256,uint256)',
contains: '$contains(uint256,uint256)',
length: '$length_EnumerableSet_UintSet(uint256)',
at: '$at_EnumerableSet_UintSet(uint256,uint256)',
values: '$values_EnumerableSet_UintSet(uint256)',
length: `$length_${library}_UintSet(uint256)`,
at: `$at_${library}_UintSet(uint256,uint256)`,
values: `$values_${library}_UintSet(uint256)`,
}),
{
addReturn: 'return$add_EnumerableSet_UintSet_uint256',
removeReturn: 'return$remove_EnumerableSet_UintSet_uint256',
addReturn: `return$add_${library}_UintSet_uint256`,
removeReturn: `return$remove_${library}_UintSet_uint256`,
},
);
});