
Commit

Merge remote-tracking branch 'origin/develop' into bold-merge
gzeoneth committed Aug 16, 2024
2 parents 9466f1c + 79a206d commit 06a7aeb
Showing 10 changed files with 240 additions and 49 deletions.
35 changes: 34 additions & 1 deletion hardhat.config.ts
@@ -1,7 +1,7 @@
import '@nomiclabs/hardhat-waffle'
import 'hardhat-deploy'
import '@nomiclabs/hardhat-ethers'
import '@nomiclabs/hardhat-etherscan'
import '@nomicfoundation/hardhat-verify'
import '@typechain/hardhat'
import 'solidity-coverage'
import 'hardhat-gas-reporter'
@@ -116,6 +116,24 @@ module.exports = {
        ? [process.env['DEVNET_PRIVKEY']]
        : [],
    },
    holesky: {
      url: 'https://holesky.infura.io/v3/' + process.env['INFURA_KEY'],
      accounts: process.env['DEVNET_PRIVKEY']
        ? [process.env['DEVNET_PRIVKEY']]
        : [],
    },
    arbRinkeby: {
      url: 'https://rinkeby.arbitrum.io/rpc',
      accounts: process.env['DEVNET_PRIVKEY']
        ? [process.env['DEVNET_PRIVKEY']]
        : [],
    },
    arbGoerliRollup: {
      url: 'https://goerli-rollup.arbitrum.io/rpc',
      accounts: process.env['DEVNET_PRIVKEY']
        ? [process.env['DEVNET_PRIVKEY']]
        : [],
    },
    arbSepolia: {
      url: 'https://sepolia-rollup.arbitrum.io/rpc',
      accounts: process.env['DEVNET_PRIVKEY']
@@ -134,6 +152,18 @@ module.exports = {
        ? [process.env['MAINNET_PRIVKEY']]
        : [],
    },
    base: {
      url: 'https://mainnet.base.org',
      accounts: process.env['MAINNET_PRIVKEY']
        ? [process.env['MAINNET_PRIVKEY']]
        : [],
    },
    baseSepolia: {
      url: 'https://sepolia.base.org',
      accounts: process.env['DEVNET_PRIVKEY']
        ? [process.env['DEVNET_PRIVKEY']]
        : [],
    },
    geth: {
      url: 'http://localhost:8545',
    },
@@ -142,9 +172,12 @@ module.exports = {
    apiKey: {
      mainnet: process.env['ETHERSCAN_API_KEY'],
      sepolia: process.env['ETHERSCAN_API_KEY'],
      holesky: process.env['ETHERSCAN_API_KEY'],
      arbitrumOne: process.env['ARBISCAN_API_KEY'],
      nova: process.env['NOVA_ARBISCAN_API_KEY'],
      arbSepolia: process.env['ARBISCAN_API_KEY'],
      base: process.env['BASESCAN_API_KEY'],
      baseSepolia: process.env['BASESCAN_API_KEY'],
    },
    customChains: [
      {
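The customChains array is truncated in this view. Recent hardhat-verify releases may already include built-in definitions for Base, but where custom entries are needed they would look roughly like the sketch below; the chain IDs and explorer URLs are assumptions for illustration, not taken from this commit.

  // Hypothetical customChains entries for the newly added Base networks.
  etherscan: {
    customChains: [
      {
        network: 'base',
        chainId: 8453, // assumed Base mainnet chain ID
        urls: {
          apiURL: 'https://api.basescan.org/api',
          browserURL: 'https://basescan.org',
        },
      },
      {
        network: 'baseSepolia',
        chainId: 84532, // assumed Base Sepolia chain ID
        urls: {
          apiURL: 'https://api-sepolia.basescan.org/api',
          browserURL: 'https://sepolia.basescan.org',
        },
      },
    ],
  },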
3 changes: 2 additions & 1 deletion package.json
@@ -43,6 +43,7 @@
"test:upgrade": "./scripts/testUpgrade.bash",
"test:foundry": "forge test --gas-limit 10000000000",
"test:update": "yarn run test:signatures || yarn run test:storage",
"metadatahash": "yarn build:all && hardhat run scripts/printMetadataHashes.ts",
"upload-4bytes": "forge build && find ./out -type f -name \"*.json\" -exec cast upload-signature {} + | grep -v Duplicated:",
"postinstall": "patch-package",
"deploy-factory": "hardhat run scripts/deployment.ts",
@@ -66,8 +67,8 @@
"@arbitrum/nitro-contracts-2.0.0": "npm:@arbitrum/[email protected]",
"@arbitrum/sdk": "^3.4.1",
"@ethersproject/providers": "^5.7.2",
"@nomicfoundation/hardhat-verify": "^2.0.9",
"@nomiclabs/hardhat-ethers": "npm:hardhat-deploy-ethers@^0.3.0-beta.13",
"@nomiclabs/hardhat-etherscan": "^3.1.0",
"@nomiclabs/hardhat-waffle": "^2.0.1",
"@tovarishfin/hardhat-yul": "^3.0.5",
"@typechain/ethers-v5": "^10.0.0",
1 change: 0 additions & 1 deletion scripts/createEthRollup.ts
@@ -4,7 +4,6 @@ import { createRollup } from './rollupCreation'

async function main() {
  const feeToken = ethers.constants.AddressZero

  const rollupCreatorAddress = process.env.ROLLUP_CREATOR_ADDRESS
  if (!rollupCreatorAddress) {
    throw new Error('ROLLUP_CREATOR_ADDRESS not set')
18 changes: 17 additions & 1 deletion scripts/deployment.ts
@@ -1,10 +1,26 @@
import { ethers } from 'hardhat'
import '@nomiclabs/hardhat-ethers'
import { deployAllContracts } from './deploymentUtils'
import { deployAllContracts, _isRunningOnArbitrum } from './deploymentUtils'
import { maxDataSize } from './config'

import {
  ArbSys__factory
} from '../build/types'

async function main() {
  const [signer] = await ethers.getSigners()

  console.log('Deploying contracts with maxDataSize:', maxDataSize)
  if (process.env['IGNORE_MAX_DATA_SIZE_WARNING'] !== 'true') {
    let isArbitrum = await _isRunningOnArbitrum(signer)
    if (isArbitrum && (maxDataSize as any) !== 104857) {
      throw new Error('maxDataSize should be 104857 when the parent chain is Arbitrum (set IGNORE_MAX_DATA_SIZE_WARNING to ignore)')
    } else if (!isArbitrum && (maxDataSize as any) !== 117964) {
      throw new Error('maxDataSize should be 117964 when the parent chain is not Arbitrum (set IGNORE_MAX_DATA_SIZE_WARNING to ignore)')
    }
  } else {
    console.log('Ignoring maxDataSize warning')
  }

  try {
    // Deploying all contracts
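The _isRunningOnArbitrum helper imported above lives in scripts/deploymentUtils.ts and its body is not part of this hunk. Below is a minimal sketch of one way such a check can be implemented, assuming it probes the ArbSys precompile at 0x0000000000000000000000000000000000000064; the actual helper in the commit may differ.

  import { Signer } from 'ethers'
  import { ArbSys__factory } from '../build/types'

  // Sketch only: treat a successful ArbSys call as evidence the parent chain is Arbitrum.
  export async function _isRunningOnArbitrum(signer: Signer): Promise<boolean> {
    const arbSys = ArbSys__factory.connect(
      '0x0000000000000000000000000000000000000064',
      signer
    )
    try {
      await arbSys.arbOSVersion() // only succeeds where the precompile exists
      return true
    } catch {
      return false
    }
  }

As a side note, the two maxDataSize constants appear to be roughly 80% and 90% of 128 KiB (131072 bytes), presumably leaving headroom under the parent chain's data limits; the commit itself does not state the rationale.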
72 changes: 63 additions & 9 deletions scripts/deploymentUtils.ts
@@ -1,5 +1,5 @@
import { ethers } from 'hardhat'
import { ContractFactory, Contract, Overrides, BigNumber } from 'ethers'
import { ContractFactory, Contract, Overrides, BigNumber, Wallet } from 'ethers'
import '@nomiclabs/hardhat-ethers'
import { run } from 'hardhat'
import {
@@ -9,13 +9,15 @@ import {
import { Toolkit4844 } from '../test/contract/toolkit4844'
import {
  ArbOwner__factory,
  ArbOwnerPublic__factory,
  ArbSys__factory,
  CacheManager__factory,
} from '../build/types'

const INIT_CACHE_SIZE = 536870912
const INIT_DECAY = 10322197911
const ARB_OWNER_ADDRESS = '0x0000000000000000000000000000000000000070'
const ARB_OWNER_PUBLIC_ADDRESS = '0x000000000000000000000000000000000000006b'
const ARB_SYS_ADDRESS = '0x0000000000000000000000000000000000000064'

// Define a verification function
@@ -32,9 +34,11 @@ export async function verifyContract(
      contract?: string
      address: string
      constructorArguments: any[]
      force: boolean
    } = {
      address: contractAddress,
      constructorArguments: constructorArguments,
      force: true,
    }

    // if contractPathAndName is provided, add it to the verification options
@@ -45,8 +49,15 @@
    await run('verify:verify', verificationOptions)
    console.log(`Verified contract ${contractName} successfully.`)
  } catch (error: any) {
    if (error.message.includes('Already Verified')) {
    if (error.message.toLowerCase().includes('already verified')) {
      console.log(`Contract ${contractName} is already verified.`)
    } else if (error.message.includes('does not have bytecode')) {
      await verifyContract(
        contractName,
        contractAddress,
        constructorArguments,
        contractPathAndName
      )
    } else {
      console.error(
        `Verification for ${contractName} failed with the following error: ${error.message}`
@@ -69,11 +80,21 @@ export async function deployContract(
  let deploymentArgs = [...constructorArgs]
  if (overrides) {
    deploymentArgs.push(overrides)
  } else {
    // overrides = {
    //   maxFeePerGas: ethers.utils.parseUnits('5.0', 'gwei'),
    //   maxPriorityFeePerGas: ethers.utils.parseUnits('0.01', 'gwei')
    // }
    // deploymentArgs.push(overrides)
  }

  const contract: Contract = await connectedFactory.deploy(...deploymentArgs)
  await contract.deployTransaction.wait()
  console.log(`New ${contractName} created at address:`, contract.address)
  console.log(
    `* New ${contractName} created at address: ${
      contract.address
    } ${constructorArgs.join(' ')}`
  )

  if (verify)
    await verifyContract(contractName, contract.address, constructorArgs)
@@ -234,6 +255,15 @@ export async function deployAllContracts(
    verify
  )
  const deployHelper = await deployContract('DeployHelper', signer, [], verify)
  if (verify && !process.env.DISABLE_VERIFICATION) {
    // Deploy RollupProxy contract only for verification, should not be used anywhere else
    await deployContract(
      'RollupProxy',
      signer,
      [],
      verify
    )
  }
  return {
    bridgeCreator,
    prover0,
@@ -252,42 +282,66 @@ export async function deployAndSetCacheManager(
}

export async function deployAndSetCacheManager(
  chainOwnerWallet: any,
  chainOwnerWallet: Wallet,
  verify: boolean = true
) {
  // deploy CacheManager
  const cacheManagerLogic = await deployContract(
    'CacheManager',
    chainOwnerWallet,
    [],
    verify
  )

  const proxyAdmin = await deployContract(
    'ProxyAdmin',
    chainOwnerWallet,
    [],
    verify
  )

  const cacheManagerProxy = await deployContract(
    'TransparentUpgradeableProxy',
    chainOwnerWallet,
    [cacheManagerLogic.address, proxyAdmin.address, '0x'],
    verify
  )

  // initialize CacheManager
  const cacheManager = CacheManager__factory.connect(
    cacheManagerProxy.address,
    chainOwnerWallet
  )

  await (await cacheManager.initialize(INIT_CACHE_SIZE, INIT_DECAY)).wait()

  const arbOwner = ArbOwner__factory.connect(
  /// add CacheManager to ArbOwner
  const arbOwnerAccount = (
    await ArbOwnerPublic__factory.connect(
      ARB_OWNER_PUBLIC_ADDRESS,
      chainOwnerWallet
    ).getAllChainOwners()
  )[0]

  const arbOwnerPrecompile = ArbOwner__factory.connect(
    ARB_OWNER_ADDRESS,
    chainOwnerWallet
  )
  await (await arbOwner.addWasmCacheManager(cacheManagerProxy.address)).wait()
  if ((await chainOwnerWallet.provider.getCode(arbOwnerAccount)) === '0x') {
    // arb owner is EOA, add cache manager directly
    await (
      await arbOwnerPrecompile.addWasmCacheManager(cacheManagerProxy.address)
    ).wait()
  } else {
    // assume upgrade executor is arb owner
    const upgradeExecutor = new ethers.Contract(
      arbOwnerAccount,
      UpgradeExecutorABI,
      chainOwnerWallet
    )
    const data = arbOwnerPrecompile.interface.encodeFunctionData(
      'addWasmCacheManager',
      [cacheManagerProxy.address]
    )
    await (await upgradeExecutor.executeCall(ARB_OWNER_ADDRESS, data)).wait()
  }

  return cacheManagerProxy
}
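UpgradeExecutorABI, used in the upgrade-executor branch above, is defined outside the hunks shown here. A hypothetical minimal fragment that would satisfy the executeCall invocation (an assumption for illustration, not copied from this commit):

  // Sketch: single-fragment ABI for the UpgradeExecutor call path used above.
  const UpgradeExecutorABI = [
    'function executeCall(address target, bytes calldata targetCallData) external payable',
  ]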
90 changes: 90 additions & 0 deletions scripts/printMetadataHashes.ts
@@ -0,0 +1,90 @@
import path from 'path'
import fs from 'fs-extra'
import hre from 'hardhat'
import { execSync } from 'child_process'

main()
  .then(() => process.exit(0))
  .catch((error: Error) => {
    console.error(error)
    process.exit(1)
  })

async function main() {
  const contracts: string[] = [
    'Inbox',
    'Outbox',
    'SequencerInbox',
    'Bridge',
    'ERC20Inbox',
    'ERC20Outbox',
    'SequencerInbox',
    'ERC20Bridge',
    'RollupProxy',
    'RollupAdminLogic',
    'RollupUserLogic',
    'ChallengeManager',
  ]

  // Print the current git tag
  const gitTag = execSync('git describe --tags').toString().trim()
  console.log(`Current tag: ${gitTag}`)

  // Check if yarn packages match yarn.lock
  try {
    execSync('yarn install --check-files', { stdio: 'ignore' })
  } catch (e) {
    console.error('Yarn packages do not match yarn.lock')
    process.exit(1)
  }

  // Check if the current working directory is clean
  try {
    execSync('git update-index --really-refresh', { stdio: 'ignore' })
    if (execSync('git status --porcelain').toString().trim()) {
      console.error('The current working directory has staged changes.')
      process.exit(1)
    }
  } catch (e) {
    console.error('The current working directory is not clean.')
    process.exit(1)
  }

  console.log('HARDHAT:')
  for (const contract of contracts) {
    const hash = await _getHardhatMetadataHash(contract)
    console.log(`${contract}: ${hash}`)
  }

  console.log('\nFOUNDRY:')
  for (const contract of contracts) {
    const hash = await _getFoundryMetadataHash(contract)
    console.log(`${contract}: ${hash}`)
  }
}

async function _getHardhatMetadataHash(contractName: string): Promise<string> {
  const artifact = await hre.artifacts.readArtifact(contractName)
  return _extractMetadataHash(artifact.bytecode)
}

async function _getFoundryMetadataHash(contractName: string): Promise<string> {
  const artifactPath = path.join(
    'out',
    `${contractName}.sol`,
    `${contractName}.json`
  )
  const artifact = await fs.readJson(artifactPath)
  return _extractMetadataHash(artifact.bytecode.object)
}

function _extractMetadataHash(bytecode: string): string {
  const metadataPattern = /a264697066735822([a-fA-F0-9]{64})/
  const matches = bytecode.match(metadataPattern)

  if (matches && matches.length > 1) {
    return matches[1]
  } else {
    throw new Error('No metadata hash found in bytecode')
  }
}
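For context, the byte sequence a2 64 69706673 58 22 is the CBOR prefix solc appends to bytecode: a two-entry map whose first key is 'ipfs' followed by a 34-byte value. The capture group grabs the first 64 hex characters of that value, which is enough to compare Hardhat and Foundry builds of the same source; the script is wired up as the new metadatahash entry in package.json above. An illustrative, self-contained check (the bytecode below is synthetic, not from any real contract):

  // Synthetic bytecode tail containing the CBOR marker followed by hex digits.
  const fakeDigest = 'ab'.repeat(32) // 64 hex characters
  const syntheticBytecode =
    '0x6080604052' + 'a264697066735822' + fakeDigest + '64736f6c63'
  console.log(_extractMetadataHash(syntheticBytecode) === fakeDigest) // true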