Update docs
scripts/gen-nav.js (new normal file, 40 lines)
@@ -0,0 +1,40 @@
#!/usr/bin/env node

const path = require('path');
const proc = require('child_process');
const startCase = require('lodash.startcase');

const baseDir = process.argv[2];

const files = proc.execFileSync(
  'find', [baseDir, '-type', 'f'], { encoding: 'utf8' },
).split('\n').filter(s => s !== '');

console.log('.API');

function getPageTitle (directory) {
  if (directory === 'metatx') {
    return 'Meta Transactions';
  } else {
    return startCase(directory);
  }
}

const links = files.map((file) => {
  const doc = file.replace(baseDir, '');
  const title = path.parse(file).name;

  return {
    xref: `* xref:${doc}[${getPageTitle(title)}]`,
    title,
  };
});

// Case-insensitive sort based on titles (so 'token/ERC20' gets sorted as 'erc20')
const sortedLinks = links.sort(function (a, b) {
  return a.title.toLowerCase().localeCompare(b.title.toLowerCase(), undefined, { numeric: true });
});

for (const link of sortedLinks) {
  console.log(link.xref);
}
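Usage: pass the generated API pages directory as the only argument; the Antora nav list is printed to stdout. This is how prepare-docs.sh below invokes it:

    node scripts/gen-nav.js docs/modules/api/pages/ > docs/modules/api/nav.adoc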
scripts/git-user-config.sh (new normal file, 6 lines)
@@ -0,0 +1,6 @@
#!/usr/bin/env bash

set -euo pipefail -x

git config user.name 'github-actions'
git config user.email '41898282+github-actions[bot]@users.noreply.github.com'
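A minimal usage sketch (the CI workflow wiring is an assumption, not part of this commit): run the script before committing generated files, so the commit is attributed to the github-actions bot:

    ./scripts/git-user-config.sh
    git commit -m "Update docs"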
scripts/inheritanceOrdering.js (new normal file, 44 lines)
@@ -0,0 +1,44 @@
const path = require('path');
const graphlib = require('graphlib');
const { findAll } = require('solidity-ast/utils');
const { _: artifacts } = require('yargs').argv;

for (const artifact of artifacts) {
  const { output: solcOutput } = require(path.resolve(__dirname, '..', artifact));

  const graph = new graphlib.Graph({ directed: true });
  const names = {};
  const linearized = [];

  for (const source in solcOutput.contracts) {
    for (const contractDef of findAll('ContractDefinition', solcOutput.sources[source].ast)) {
      names[contractDef.id] = contractDef.name;
      linearized.push(contractDef.linearizedBaseContracts);

      contractDef.linearizedBaseContracts.forEach((c1, i, contracts) => contracts.slice(i + 1).forEach(c2 => {
        graph.setEdge(c1, c2);
      }));
    }
  }

  /// graphlib.alg.findCycles will not find minimal cycles.
  /// We are only interested in cycles of length 2 (needs proof).
  graph.nodes().forEach((x, i, nodes) => nodes
    .slice(i + 1)
    .filter(y => graph.hasEdge(x, y) && graph.hasEdge(y, x))
    .map(y => {
      console.log(`Conflict between ${names[x]} and ${names[y]} detected in the following dependency chains:`);
      linearized
        .filter(chain => chain.includes(parseInt(x)) && chain.includes(parseInt(y)))
        .forEach(chain => {
          const comp = chain.indexOf(parseInt(x)) < chain.indexOf(parseInt(y)) ? '>' : '<';
          console.log(`- ${names[x]} ${comp} ${names[y]} in ${names[chain.find(Boolean)]}`);
          // console.log(`- ${names[x]} ${comp} ${names[y]}: ${chain.reverse().map(id => names[id]).join(', ')}`);
        });
      process.exitCode = 1;
    }));
}

if (!process.exitCode) {
  console.log('Contract ordering is consistent.');
}
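Each positional argument is a path, relative to the repository root, to a JSON file with solc output under an 'output' key. A hedged invocation, assuming Hardhat build-info artifacts:

    node scripts/inheritanceOrdering.js artifacts/build-info/*.json

The script sets a non-zero exit code whenever two contracts appear in opposite orders in different inheritance linearizations.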
scripts/migrate-imports.js (new executable file, 177 lines)
@@ -0,0 +1,177 @@
#!/usr/bin/env node

const { promises: fs } = require('fs');
const path = require('path');

const pathUpdates = {
  // 'access/AccessControl.sol': undefined,
  // 'access/Ownable.sol': undefined,
  'access/TimelockController.sol': 'governance/TimelockController.sol',
  'cryptography/ECDSA.sol': 'utils/cryptography/ECDSA.sol',
  'cryptography/MerkleProof.sol': 'utils/cryptography/MerkleProof.sol',
  'drafts/EIP712.sol': 'utils/cryptography/draft-EIP712.sol',
  'drafts/ERC20Permit.sol': 'token/ERC20/extensions/draft-ERC20Permit.sol',
  'drafts/IERC20Permit.sol': 'token/ERC20/extensions/draft-IERC20Permit.sol',
  'GSN/Context.sol': 'utils/Context.sol',
  // 'GSN/GSNRecipientERC20Fee.sol': undefined,
  // 'GSN/GSNRecipientSignature.sol': undefined,
  // 'GSN/GSNRecipient.sol': undefined,
  // 'GSN/IRelayHub.sol': undefined,
  // 'GSN/IRelayRecipient.sol': undefined,
  'introspection/ERC165Checker.sol': 'utils/introspection/ERC165Checker.sol',
  'introspection/ERC165.sol': 'utils/introspection/ERC165.sol',
  'introspection/ERC1820Implementer.sol': 'utils/introspection/ERC1820Implementer.sol',
  'introspection/IERC165.sol': 'utils/introspection/IERC165.sol',
  'introspection/IERC1820Implementer.sol': 'utils/introspection/IERC1820Implementer.sol',
  'introspection/IERC1820Registry.sol': 'utils/introspection/IERC1820Registry.sol',
  'math/Math.sol': 'utils/math/Math.sol',
  'math/SafeMath.sol': 'utils/math/SafeMath.sol',
  'math/SignedSafeMath.sol': 'utils/math/SignedSafeMath.sol',
  'payment/escrow/ConditionalEscrow.sol': 'utils/escrow/ConditionalEscrow.sol',
  'payment/escrow/Escrow.sol': 'utils/escrow/Escrow.sol',
  'payment/escrow/RefundEscrow.sol': 'utils/escrow/RefundEscrow.sol',
  'payment/PaymentSplitter.sol': 'finance/PaymentSplitter.sol',
  'utils/PaymentSplitter.sol': 'finance/PaymentSplitter.sol',
  'payment/PullPayment.sol': 'security/PullPayment.sol',
  'presets/ERC1155PresetMinterPauser.sol': 'token/ERC1155/presets/ERC1155PresetMinterPauser.sol',
  'presets/ERC20PresetFixedSupply.sol': 'token/ERC20/presets/ERC20PresetFixedSupply.sol',
  'presets/ERC20PresetMinterPauser.sol': 'token/ERC20/presets/ERC20PresetMinterPauser.sol',
  'presets/ERC721PresetMinterPauserAutoId.sol': 'token/ERC721/presets/ERC721PresetMinterPauserAutoId.sol',
  'presets/ERC777PresetFixedSupply.sol': 'token/ERC777/presets/ERC777PresetFixedSupply.sol',
  'proxy/BeaconProxy.sol': 'proxy/beacon/BeaconProxy.sol',
  // 'proxy/Clones.sol': undefined,
  'proxy/IBeacon.sol': 'proxy/beacon/IBeacon.sol',
  'proxy/Initializable.sol': 'proxy/utils/Initializable.sol',
  'utils/Initializable.sol': 'proxy/utils/Initializable.sol',
  'proxy/ProxyAdmin.sol': 'proxy/transparent/ProxyAdmin.sol',
  // 'proxy/Proxy.sol': undefined,
  'proxy/TransparentUpgradeableProxy.sol': 'proxy/transparent/TransparentUpgradeableProxy.sol',
  'proxy/UpgradeableBeacon.sol': 'proxy/beacon/UpgradeableBeacon.sol',
  'proxy/UpgradeableProxy.sol': 'proxy/ERC1967/ERC1967Proxy.sol',
  'token/ERC1155/ERC1155Burnable.sol': 'token/ERC1155/extensions/ERC1155Burnable.sol',
  'token/ERC1155/ERC1155Holder.sol': 'token/ERC1155/utils/ERC1155Holder.sol',
  'token/ERC1155/ERC1155Pausable.sol': 'token/ERC1155/extensions/ERC1155Pausable.sol',
  'token/ERC1155/ERC1155Receiver.sol': 'token/ERC1155/utils/ERC1155Receiver.sol',
  // 'token/ERC1155/ERC1155.sol': undefined,
  'token/ERC1155/IERC1155MetadataURI.sol': 'token/ERC1155/extensions/IERC1155MetadataURI.sol',
  // 'token/ERC1155/IERC1155Receiver.sol': undefined,
  // 'token/ERC1155/IERC1155.sol': undefined,
  'token/ERC20/ERC20Burnable.sol': 'token/ERC20/extensions/ERC20Burnable.sol',
  'token/ERC20/ERC20Capped.sol': 'token/ERC20/extensions/ERC20Capped.sol',
  'token/ERC20/ERC20Pausable.sol': 'token/ERC20/extensions/ERC20Pausable.sol',
  'token/ERC20/ERC20Snapshot.sol': 'token/ERC20/extensions/ERC20Snapshot.sol',
  // 'token/ERC20/ERC20.sol': undefined,
  // 'token/ERC20/IERC20.sol': undefined,
  'token/ERC20/SafeERC20.sol': 'token/ERC20/utils/SafeERC20.sol',
  'token/ERC20/TokenTimelock.sol': 'token/ERC20/utils/TokenTimelock.sol',
  'token/ERC721/ERC721Burnable.sol': 'token/ERC721/extensions/ERC721Burnable.sol',
  'token/ERC721/ERC721Holder.sol': 'token/ERC721/utils/ERC721Holder.sol',
  'token/ERC721/ERC721Pausable.sol': 'token/ERC721/extensions/ERC721Pausable.sol',
  // 'token/ERC721/ERC721.sol': undefined,
  'token/ERC721/IERC721Enumerable.sol': 'token/ERC721/extensions/IERC721Enumerable.sol',
  'token/ERC721/IERC721Metadata.sol': 'token/ERC721/extensions/IERC721Metadata.sol',
  // 'token/ERC721/IERC721Receiver.sol': undefined,
  // 'token/ERC721/IERC721.sol': undefined,
  // 'token/ERC777/ERC777.sol': undefined,
  // 'token/ERC777/IERC777Recipient.sol': undefined,
  // 'token/ERC777/IERC777Sender.sol': undefined,
  // 'token/ERC777/IERC777.sol': undefined,
  // 'utils/Address.sol': undefined,
  // 'utils/Arrays.sol': undefined,
  // 'utils/Context.sol': undefined,
  // 'utils/Counters.sol': undefined,
  // 'utils/Create2.sol': undefined,
  'utils/EnumerableMap.sol': 'utils/structs/EnumerableMap.sol',
  'utils/EnumerableSet.sol': 'utils/structs/EnumerableSet.sol',
  'utils/Pausable.sol': 'security/Pausable.sol',
  'utils/ReentrancyGuard.sol': 'security/ReentrancyGuard.sol',
  'utils/SafeCast.sol': 'utils/math/SafeCast.sol',
  // 'utils/Strings.sol': undefined,
};

async function main (paths = [ 'contracts' ]) {
  const files = await listFilesRecursively(paths, /\.sol$/);

  const updatedFiles = [];
  for (const file of files) {
    if (await updateFile(file, updateImportPaths)) {
      updatedFiles.push(file);
    }
  }

  if (updatedFiles.length > 0) {
    console.log(`${updatedFiles.length} file(s) were updated`);
    for (const c of updatedFiles) {
      console.log('-', c);
    }
  } else {
    console.log('No files were updated');
  }
}

async function listFilesRecursively (paths, filter) {
  const queue = paths;
  const files = [];

  while (queue.length > 0) {
    const top = queue.shift();
    const stat = await fs.stat(top);
    if (stat.isFile()) {
      if (top.match(filter)) {
        files.push(top);
      }
    } else if (stat.isDirectory()) {
      for (const name of await fs.readdir(top)) {
        queue.push(path.join(top, name));
      }
    }
  }

  return files;
}

async function updateFile (file, update) {
  const content = await fs.readFile(file, 'utf8');
  const updatedContent = update(content);
  if (updatedContent !== content) {
    await fs.writeFile(file, updatedContent);
    return true;
  } else {
    return false;
  }
}

function updateImportPaths (source) {
  for (const [ oldPath, newPath ] of Object.entries(pathUpdates)) {
    source = source.replace(
      path.join('@openzeppelin/contracts', oldPath),
      path.join('@openzeppelin/contracts', newPath),
    );
    source = source.replace(
      path.join('@openzeppelin/contracts-upgradeable', getUpgradeablePath(oldPath)),
      path.join('@openzeppelin/contracts-upgradeable', getUpgradeablePath(newPath)),
    );
  }

  return source;
}

function getUpgradeablePath (file) {
  const { dir, name, ext } = path.parse(file);
  const upgradeableName = name + 'Upgradeable';
  return path.format({ dir, ext, name: upgradeableName });
}

module.exports = {
  pathUpdates,
  updateImportPaths,
  getUpgradeablePath,
};

if (require.main === module) {
  const args = process.argv.length > 2 ? process.argv.slice(2) : undefined;
  main(args).catch(e => {
    console.error(e);
    process.exit(1);
  });
}
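Run from the project root, optionally passing the directories to migrate (defaults to contracts). For example, given the mapping above, a line such as

    import "@openzeppelin/contracts/math/SafeMath.sol";

is rewritten to

    import "@openzeppelin/contracts/utils/math/SafeMath.sol";

and since getUpgradeablePath('math/SafeMath.sol') returns 'math/SafeMathUpgradeable.sol', the corresponding @openzeppelin/contracts-upgradeable imports are migrated as well.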
scripts/prepack.sh (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/usr/bin/env bash

set -euo pipefail
shopt -s globstar

# cross platform `mkdir -p`
node -e 'fs.mkdirSync("build/contracts", { recursive: true })'

cp artifacts/contracts/**/*.json build/contracts
rm build/contracts/*.dbg.json

node scripts/remove-ignored-artifacts.js
scripts/prepare-contracts-package.sh (new executable file, 15 lines)
@@ -0,0 +1,15 @@
#!/usr/bin/env bash

# cd to the root of the repo
cd "$(git rev-parse --show-toplevel)"

# avoids re-compilation during publishing of both packages
if [[ ! -v ALREADY_COMPILED ]]; then
  npm run clean
  npm run prepare
  npm run prepack
fi

cp README.md contracts/
mkdir contracts/build contracts/build/contracts
cp -r build/contracts/*.json contracts/build/contracts
scripts/prepare-docs-solc.js (new normal file, 16 lines)
@@ -0,0 +1,16 @@
const hre = require('hardhat');

const { getCompilersDir } = require('hardhat/internal/util/global-dir');
const { CompilerDownloader } = require('hardhat/internal/solidity/compiler/downloader');
const { Compiler } = require('hardhat/internal/solidity/compiler');

const [{ version }] = hre.config.solidity.compilers;

async function getSolc () {
  const downloader = new CompilerDownloader(await getCompilersDir(), { forceSolcJs: true });
  const { compilerPath } = await downloader.getDownloadedCompilerPath(version);
  const compiler = new Compiler(compilerPath);
  return compiler.getSolc();
}

module.exports = Object.assign(getSolc(), { __esModule: true });
scripts/prepare-docs.sh (new executable file, 21 lines)
@@ -0,0 +1,21 @@
#!/usr/bin/env bash

set -o errexit

OUTDIR=docs/modules/api/pages/

if [ ! -d node_modules ]; then
  npm ci
fi

rm -rf "$OUTDIR"

solidity-docgen \
  -t docs \
  -o "$OUTDIR" \
  -e contracts/mocks,contracts/examples \
  --output-structure readmes \
  --helpers ./docs/helpers.js \
  --solc-module ./scripts/prepare-docs-solc.js

node scripts/gen-nav.js "$OUTDIR" > "$OUTDIR/../nav.adoc"
scripts/release/release.sh (new executable file, 153 lines)
@@ -0,0 +1,153 @@
#!/usr/bin/env bash

# Exit script as soon as a command fails.
set -o errexit

# Default the prerelease version suffix to rc
: ${PRERELEASE_SUFFIX:=rc}

log() {
  # Print to stderr to prevent this from being 'returned'
  echo "$@" > /dev/stderr
}

current_version() {
  echo "v$(node --print --eval "require('./package.json').version")"
}

current_release_branch() {
  v="$(current_version)"             # 3.3.0-rc.0
  vf="${v%-"$PRERELEASE_SUFFIX".*}"  # 3.3.0
  r="${vf%.*}"                       # 3.3
  echo "release-$r"
}

assert_current_branch() {
  current_branch="$(git symbolic-ref --short HEAD)"
  expected_branch="$1"
  if [[ "$current_branch" != "$expected_branch" ]]; then
    log "Current branch '$current_branch' is not '$expected_branch'"
    exit 1
  fi
}

push_release_branch_and_tag() {
  git push upstream "$(current_release_branch)" "$(current_version)"
}

publish() {
  dist_tag="$1"

  log "Publishing openzeppelin-solidity on npm"
  npm publish --tag "$dist_tag" --otp "$(prompt_otp)"

  log "Publishing @openzeppelin/contracts on npm"
  cd contracts
  env ALREADY_COMPILED= \
    npm publish --tag "$dist_tag" --otp "$(prompt_otp)"
  cd ..

  if [[ "$dist_tag" == "latest" ]]; then
    otp="$(prompt_otp)"
    npm dist-tag rm --otp "$otp" openzeppelin-solidity next
    npm dist-tag rm --otp "$otp" @openzeppelin/contracts next
  fi
}

push_and_publish() {
  dist_tag="$1"

  log "Pushing release branch and tags to upstream"
  push_release_branch_and_tag

  publish "$dist_tag"
}

prompt_otp() {
  log -n "Enter npm 2FA token: "
  read -r otp
  echo "$otp"
}

environment_check() {
  if ! git remote get-url upstream &> /dev/null; then
    log "No 'upstream' remote found"
    exit 1
  fi

  if npm whoami &> /dev/null; then
    log "Will publish as '$(npm whoami)'"
  else
    log "Not logged in into npm, run 'npm login' first"
    exit 1
  fi
}

environment_check

if [[ "$*" == "push" ]]; then
  push_and_publish next

elif [[ "$*" == "start minor" ]]; then
  log "Creating new minor pre-release"

  assert_current_branch master

  # Create temporary release branch
  git checkout -b release-temp

  # This bumps minor and adds prerelease suffix, commits the changes, and tags the commit
  npm version preminor --preid="$PRERELEASE_SUFFIX"

  # Rename the release branch
  git branch --move "$(current_release_branch)"

  push_and_publish next

elif [[ "$*" == "start major" ]]; then
  log "Creating new major pre-release"

  assert_current_branch master

  # Create temporary release branch
  git checkout -b release-temp

  # This bumps major and adds prerelease suffix, commits the changes, and tags the commit
  npm version premajor --preid="$PRERELEASE_SUFFIX"

  # Rename the release branch
  git branch --move "$(current_release_branch)"

  push_and_publish next

elif [[ "$*" == "rc" ]]; then
  log "Bumping pre-release"

  assert_current_branch "$(current_release_branch)"

  # Bumps prerelease number, commits and tags
  npm version prerelease

  push_and_publish next

elif [[ "$*" == "final" ]]; then
  # Update changelog release date, remove prerelease suffix, tag, push to git, publish in npm, remove next dist-tag
  log "Creating final release"

  assert_current_branch "$(current_release_branch)"

  # This will remove the prerelease suffix from the version
  npm version patch

  push_release_branch_and_tag

  push_and_publish latest

  npm deprecate 'openzeppelin-solidity@>=4.0.0' "This package is now published as @openzeppelin/contracts. Please change your dependency."

  log "Remember to merge the release branch into master and push upstream"

else
  log "Unknown command: '$*'"
  exit 1
fi
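Supported invocations, as implemented above (everything publishes under the npm dist-tag next, except final):

    scripts/release/release.sh start minor   # branch off master, npm version preminor, publish prerelease
    scripts/release/release.sh start major   # same, but npm version premajor
    scripts/release/release.sh rc            # bump the prerelease number on the release branch
    scripts/release/release.sh final         # drop the prerelease suffix and publish as latest
    scripts/release/release.sh push          # push the release branch and tag, then publish under next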
scripts/release/synchronize-versions.js (new executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env node

// Synchronizes the version in contracts/package.json with the one in package.json.
// This is run automatically when npm version is run.

const fs = require('fs');
const cp = require('child_process');

setVersion('contracts/package.json');

function setVersion (file) {
  const json = JSON.parse(fs.readFileSync(file));
  json.version = process.env.npm_package_version;
  fs.writeFileSync(file, JSON.stringify(json, null, 2) + '\n');
  cp.execFileSync('git', ['add', file]);
}
scripts/release/update-changelog-release-date.js (new executable file, 34 lines)
@@ -0,0 +1,34 @@
#!/usr/bin/env node

// Sets the release date of the current release in the changelog.
// This is run automatically when npm version is run.

const fs = require('fs');
const cp = require('child_process');

const suffix = process.env.PRERELEASE_SUFFIX || 'rc';

const changelog = fs.readFileSync('CHANGELOG.md', 'utf8');

// The changelog entry to be updated looks like this:
// ## Unreleased
// We need to add the version and release date in a YYYY-MM-DD format, so that it looks like this:
// ## 2.5.3 (2019-04-25)

const pkg = require('../../package.json');
const version = pkg.version.replace(new RegExp('-' + suffix + '\\..*'), '');

const header = new RegExp(`^## (Unreleased|${version})$`, 'm');

if (!header.test(changelog)) {
  console.error('Missing changelog entry');
  process.exit(1);
}

const newHeader = pkg.version.indexOf(suffix) === -1
  ? `## ${version} (${new Date().toISOString().split('T')[0]})`
  : `## ${version}`;

fs.writeFileSync('CHANGELOG.md', changelog.replace(header, newHeader));

cp.execSync('git add CHANGELOG.md', { stdio: 'inherit' });
scripts/release/update-comment.js (new executable file, 35 lines)
@@ -0,0 +1,35 @@
#!/usr/bin/env node
const fs = require('fs');
const proc = require('child_process');
const semver = require('semver');
const run = (cmd, ...args) => proc.execFileSync(cmd, args, { encoding: 'utf8' }).trim();

const gitStatus = run('git', 'status', '--porcelain', '-uno', 'contracts/**/*.sol');
if (gitStatus.length > 0) {
  console.error('Contracts directory is not clean');
  process.exit(1);
}

const { version } = require('../../package.json');

// Get latest tag according to semver.
const [ tag ] = run('git', 'tag')
  .split(/\r?\n/)
  .filter(v => semver.lt(semver.coerce(v), version)) // only consider older tags, ignore current prereleases
  .sort(semver.rcompare);

// Ordering tag → HEAD is important here.
const files = run('git', 'diff', tag, 'HEAD', '--name-only', 'contracts/**/*.sol')
  .split(/\r?\n/)
  .filter(file => file && !file.match(/mock/i));

for (const file of files) {
  const current = fs.readFileSync(file, 'utf8');
  const updated = current.replace(
    /(\/\/ SPDX-License-Identifier:.*)$(\n\/\/ OpenZeppelin Contracts .*$)?/m,
    `$1\n// OpenZeppelin Contracts (last updated v${version}) (${file.replace('contracts/', '')})`,
  );
  fs.writeFileSync(file, updated);
}

run('git', 'add', '--update', 'contracts');
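Effect on one changed file (the version number here is illustrative): a header such as

    // SPDX-License-Identifier: MIT

becomes

    // SPDX-License-Identifier: MIT
    // OpenZeppelin Contracts (last updated v4.4.0) (token/ERC20/ERC20.sol)

An existing 'OpenZeppelin Contracts' line is replaced rather than duplicated, thanks to the optional second group in the regular expression.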
scripts/release/version.sh (new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

set -o errexit

scripts/release/update-changelog-release-date.js
scripts/release/synchronize-versions.js
scripts/release/update-comment.js

oz-docs update-version
scripts/remove-ignored-artifacts.js (new normal file, 45 lines)
@@ -0,0 +1,45 @@
#!/usr/bin/env node

// This script removes the build artifacts of ignored contracts.

const fs = require('fs');
const path = require('path');
const match = require('micromatch');

function readJSON (path) {
  return JSON.parse(fs.readFileSync(path));
}

const pkgFiles = readJSON('package.json').files;

// Get only negated patterns.
const ignorePatterns = pkgFiles
  .filter(pat => pat.startsWith('!'))
  // Remove the negation part. Makes micromatch usage more intuitive.
  .map(pat => pat.slice(1));

const ignorePatternsSubtrees = ignorePatterns
  // Add **/* to ignore all files contained in the directories.
  .concat(ignorePatterns.map(pat => path.join(pat, '**/*')))
  .map(p => p.replace(/^\//, ''));

const artifactsDir = 'build/contracts';
const buildinfo = 'artifacts/build-info';
const filenames = fs.readdirSync(buildinfo);

let n = 0;

for (const filename of filenames) {
  const solcOutput = readJSON(path.join(buildinfo, filename)).output;
  for (const sourcePath in solcOutput.contracts) {
    const ignore = match.any(sourcePath, ignorePatternsSubtrees);
    if (ignore) {
      for (const contract in solcOutput.contracts[sourcePath]) {
        fs.unlinkSync(path.join(artifactsDir, contract + '.json'));
        n += 1;
      }
    }
  }
}

console.error(`Removed ${n} mock artifacts`);
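A sketch of the package.json shape this relies on (the exact patterns are an assumption, not part of this commit):

    "files": [
      "/contracts/**/*.sol",
      "!/contracts/mocks/**/*"
    ]

The negated entry becomes the micromatch pattern 'contracts/mocks/**/*', so artifacts compiled from mock sources are deleted from build/contracts.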
scripts/update-docs-branch.js (new normal file, 55 lines)
@@ -0,0 +1,55 @@
const proc = require('child_process');
const read = cmd => proc.execSync(cmd, { encoding: 'utf8' }).trim();
const run = cmd => { proc.execSync(cmd, { stdio: 'inherit' }); };
const tryRead = cmd => { try { return read(cmd); } catch (e) { return undefined; } };

const releaseBranchRegex = /^release-v(?<version>(?<major>\d+)\.(?<minor>\d+)(?:\.(?<patch>\d+))?)$/;

const currentBranch = read(`git rev-parse --abbrev-ref HEAD`);
const match = currentBranch.match(releaseBranchRegex);

if (!match) {
  console.error(`Not currently on a release branch`);
  process.exit(1);
}

if (/-.*$/.test(require('../package.json').version)) {
  console.error(`Refusing to update docs: prerelease detected`);
  process.exit(0);
}

const current = match.groups;
const docsBranch = `docs-v${current.major}.x`;

// Fetch remotes and find the docs branch if it exists
run(`git fetch --all --no-tags`);
const matchingDocsBranches = tryRead(`git rev-parse --glob='*/${docsBranch}'`);

if (!matchingDocsBranches) {
  // Create the branch
  run(`git checkout --orphan ${docsBranch}`);
} else {
  const [publishedRef, ...others] = new Set(matchingDocsBranches.split('\n'));
  if (others.length > 0) {
    console.error(
      `Found conflicting ${docsBranch} branches.\n`
      + `Either local branch is outdated or there are multiple matching remote branches.`
    );
    process.exit(1);
  }
  const publishedVersion = JSON.parse(read(`git show ${publishedRef}:package.json`)).version;
  const publishedMinor = publishedVersion.match(/\d+\.(?<minor>\d+)\.\d+/).groups.minor;
  if (current.minor < publishedMinor) {
    console.error(`Refusing to update docs: newer version is published`);
    process.exit(0);
  }

  run(`git checkout --quiet --detach`);
  run(`git reset --soft ${publishedRef}`);
  run(`git checkout ${docsBranch}`);
}

run(`npm run prepare-docs`);
run(`git add -f docs`); // --force needed because generated docs files are gitignored
run(`git commit -m "Update docs"`);
run(`git checkout ${currentBranch}`);
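For example (version numbers illustrative): run on branch release-v3.3 with a final 3.3.x version in package.json, the script regenerates the docs and commits them on docs-v3.x, but refuses to act on prereleases or when a newer minor is already published:

    git checkout release-v3.3
    node scripts/update-docs-branch.js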