Commit

address comments
StanislavBreadless committed Mar 25, 2024
1 parent d1dc85a commit 79df3ee
Showing 5 changed files with 54 additions and 70 deletions.
@@ -62,7 +62,7 @@ interface IStateTransitionManager {

function protocolVersion() external view returns (uint256);

function protocolVersionTimestamp(uint256 _protocolVersion) external view returns (uint256);
function protocolVersionDeadline(uint256 _protocolVersion) external view returns (uint256);

function protocolVersionIsActive(uint256 _protocolVersion) external view returns (bool);

@@ -88,7 +88,7 @@ interface IStateTransitionManager {
function setNewVersionUpgrade(
Diamond.DiamondCutData calldata _cutData,
uint256 _oldProtocolVersion,
uint256 _oldProtocolVersionTimestamp,
uint256 _oldProtocolVersionDeadline,
uint256 _newProtocolVersion
) external;

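The getter rename above is easiest to read from the caller's side. Below is a minimal consumer sketch, assuming only the two view functions shown in this hunk; the `GatedByProtocolVersion` contract and its function names are hypothetical, not part of the codebase.

```solidity
// SPDX-License-Identifier: MIT
pragma solidity 0.8.24;

// Reduced excerpt of the interface: only the two view functions touched by this diff.
interface IStateTransitionManagerExcerpt {
    function protocolVersionDeadline(uint256 _protocolVersion) external view returns (uint256);
    function protocolVersionIsActive(uint256 _protocolVersion) external view returns (bool);
}

/// Hypothetical consumer that refuses to act on protocol versions past their deadline.
contract GatedByProtocolVersion {
    IStateTransitionManagerExcerpt public immutable stm;

    constructor(IStateTransitionManagerExcerpt _stm) {
        stm = _stm;
    }

    /// Reverts unless `_protocolVersion` is still usable, i.e.
    /// block.timestamp <= protocolVersionDeadline(_protocolVersion).
    function requireActive(uint256 _protocolVersion) public view {
        require(stm.protocolVersionIsActive(_protocolVersion), "protocol version expired");
    }

    /// Seconds remaining before `_protocolVersion` expires (0 if already expired).
    function secondsUntilExpiry(uint256 _protocolVersion) external view returns (uint256) {
        uint256 deadline = stm.protocolVersionDeadline(_protocolVersion);
        return deadline > block.timestamp ? deadline - block.timestamp : 0;
    }
}
```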
@@ -42,7 +42,7 @@ contract StateTransitionManager is IStateTransitionManager, ReentrancyGuard, Own
uint256 public protocolVersion;

/// @dev The last timestamp at which a given protocolVersion can still be used
mapping(uint256 _protocolVersion => uint256) public protocolVersionTimestamp;
mapping(uint256 _protocolVersion => uint256) public protocolVersionDeadline;

/// @dev validatorTimelock contract address, used to setChainId
address public validatorTimelock;
@@ -87,7 +87,7 @@ contract StateTransitionManager is IStateTransitionManager, ReentrancyGuard, Own

genesisUpgrade = _initializeData.genesisUpgrade;
protocolVersion = _initializeData.protocolVersion;
protocolVersionTimestamp[_initializeData.protocolVersion] = type(uint256).max;
protocolVersionDeadline[_initializeData.protocolVersion] = type(uint256).max;
validatorTimelock = _initializeData.validatorTimelock;

// We need to initialize the state hash because it is used in the commitment of the next batch
@@ -147,23 +147,23 @@ contract StateTransitionManager is IStateTransitionManager, ReentrancyGuard, Own
function setNewVersionUpgrade(
Diamond.DiamondCutData calldata _cutData,
uint256 _oldProtocolVersion,
uint256 _oldProtocolVersionTimestamp,
uint256 _oldProtocolVersionDeadline,
uint256 _newProtocolVersion
) external onlyOwner {
protocolVersionTimestamp[_oldProtocolVersion] = _oldProtocolVersionTimestamp;
protocolVersionDeadline[_oldProtocolVersion] = _oldProtocolVersionDeadline;
upgradeCutHash[_oldProtocolVersion] = keccak256(abi.encode(_cutData));
protocolVersionTimestamp[_newProtocolVersion] = type(uint256).max;
protocolVersionDeadline[_newProtocolVersion] = type(uint256).max;
protocolVersion = _newProtocolVersion;
}

/// @dev check that the protocolVersion is active
function protocolVersionIsActive(uint256 _protocolVersion) external view override returns (bool) {
return block.timestamp <= protocolVersionTimestamp[_protocolVersion];
return block.timestamp <= protocolVersionDeadline[_protocolVersion];
}

/// @dev set the deadline (timestamp) for a protocol version
function setProtocolVersionTimestamp(uint256 _protocolVersion, uint256 _timestamp) external onlyOwner {
protocolVersionTimestamp[_protocolVersion] = _timestamp;
function setProtocolVersionDeadline(uint256 _protocolVersion, uint256 _timestamp) external onlyOwner {
protocolVersionDeadline[_protocolVersion] = _timestamp;
}

/// @dev set upgrade for some protocolVersion
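To make the lifecycle of the renamed mapping concrete, here is a stripped-down sketch of the deadline bookkeeping, not the real StateTransitionManager: access control, the diamond-cut hash, and chain deployment are omitted, and unlike the real `setNewVersionUpgrade` the sketch simply treats the current `protocolVersion` as the version being retired.

```solidity
// SPDX-License-Identifier: MIT
pragma solidity 0.8.24;

/// Stripped-down model of the protocolVersionDeadline bookkeeping in this diff.
contract ProtocolVersionDeadlineSketch {
    uint256 public protocolVersion;

    /// Timestamp up to which each protocolVersion may still be used.
    mapping(uint256 => uint256) public protocolVersionDeadline;

    constructor(uint256 _initialProtocolVersion) {
        protocolVersion = _initialProtocolVersion;
        // The current version has no expiry until a newer one is published.
        protocolVersionDeadline[_initialProtocolVersion] = type(uint256).max;
    }

    /// Mirrors the deadline-related part of setNewVersionUpgrade: the old version
    /// gets a finite deadline, the new version becomes open-ended and current.
    function setNewVersionUpgrade(uint256 _oldProtocolVersionDeadline, uint256 _newProtocolVersion) external {
        protocolVersionDeadline[protocolVersion] = _oldProtocolVersionDeadline;
        protocolVersionDeadline[_newProtocolVersion] = type(uint256).max;
        protocolVersion = _newProtocolVersion;
    }

    /// Same check as in the diff: a version is active while its deadline has not passed.
    function protocolVersionIsActive(uint256 _protocolVersion) external view returns (bool) {
        return block.timestamp <= protocolVersionDeadline[_protocolVersion];
    }
}
```

Initialization matches the constructor change above: the genesis protocolVersion gets `type(uint256).max`, so it stays active until `setNewVersionUpgrade` (or `setProtocolVersionDeadline`) assigns it a finite deadline.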
@@ -4,7 +4,7 @@ pragma solidity 0.8.24;

import {ZkSyncStateTransitionBase} from "./ZkSyncStateTransitionBase.sol";
import {COMMIT_TIMESTAMP_NOT_OLDER, COMMIT_TIMESTAMP_APPROXIMATION_DELTA, EMPTY_STRING_KECCAK, L2_TO_L1_LOG_SERIALIZE_SIZE, MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, PACKED_L2_BLOCK_TIMESTAMP_MASK, PUBLIC_INPUT_SHIFT, POINT_EVALUATION_PRECOMPILE_ADDR} from "../../../common/Config.sol";
import {IExecutor, L2_LOG_ADDRESS_OFFSET, L2_LOG_KEY_OFFSET, L2_LOG_VALUE_OFFSET, SystemLogKey, LogProcessingOutput, PubdataSource, BLS_MODULUS, PUBDATA_COMMITMENT_SIZE, PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET, PUBDATA_COMMITMENT_COMMITMENT_OFFSET, MAX_NUMBER_OF_BLOBS, TOTAL_BLOBS_IN_COMMITMENT, MAX_CALLDATA_SIZE} from "../../chain-interfaces/IExecutor.sol";
import {IExecutor, L2_LOG_ADDRESS_OFFSET, L2_LOG_KEY_OFFSET, L2_LOG_VALUE_OFFSET, SystemLogKey, LogProcessingOutput, PubdataSource, BLS_MODULUS, PUBDATA_COMMITMENT_SIZE, PUBDATA_COMMITMENT_CLAIMED_VALUE_OFFSET, PUBDATA_COMMITMENT_COMMITMENT_OFFSET, MAX_NUMBER_OF_BLOBS, TOTAL_BLOBS_IN_COMMITMENT, BLOB_SIZE_BYTES} from "../../chain-interfaces/IExecutor.sol";
import {PriorityQueue, PriorityOperation} from "../../libraries/PriorityQueue.sol";
import {UncheckedMath} from "../../../common/libraries/UncheckedMath.sol";
import {UnsafeBytes} from "../../../common/libraries/UnsafeBytes.sol";
@@ -42,7 +42,6 @@ contract ExecutorFacet is ZkSyncStateTransitionBase, IExecutor {
// Check that the batch contains all meta information for L2 logs.
// Get the chained hash of priority transaction hashes.
LogProcessingOutput memory logOutput = _processL2Logs(_newBatch, _expectedSystemContractUpgradeTxHash);
bytes32[] memory blobHashes = _processBlobLogs(_newBatch);

bytes32[] memory blobCommitments = new bytes32[](MAX_NUMBER_OF_BLOBS);
if (s.feeParams.pubdataPricingMode == PubdataPricingMode.Validium) {
@@ -51,10 +50,10 @@ contract ExecutorFacet is ZkSyncStateTransitionBase, IExecutor {
require(_newBatch.pubdataCommitments.length == 1);
} else if (pubdataSource == uint8(PubdataSource.Blob)) {
// In this scenario, pubdataCommitments is a list of: opening point (16 bytes) || claimed value (32 bytes) || commitment (48 bytes) || proof (48 bytes) = 144 bytes
blobCommitments = _verifyBlobInformation(_newBatch.pubdataCommitments[1:], blobHashes);
blobCommitments = _verifyBlobInformation(_newBatch.pubdataCommitments[1:], logOutput.blobHashes);
} else if (pubdataSource == uint8(PubdataSource.Calldata)) {
// In this scenario pubdataCommitments is actual pubdata consisting of l2 to l1 logs, l2 to l1 message, compressed smart contract bytecode, and compressed state diffs
require(_newBatch.pubdataCommitments.length <= MAX_CALLDATA_SIZE, "cz");
require(_newBatch.pubdataCommitments.length <= BLOB_SIZE_BYTES, "cz");
require(
logOutput.pubdataHash ==
keccak256(_newBatch.pubdataCommitments[1:_newBatch.pubdataCommitments.length - 32]),
@@ -77,7 +76,12 @@ contract ExecutorFacet is ZkSyncStateTransitionBase, IExecutor {
_verifyBatchTimestamp(logOutput.packedBatchAndL2BlockTimestamp, _newBatch.timestamp, _previousBatch.timestamp);

// Create batch commitment for the proof verification
bytes32 commitment = _createBatchCommitment(_newBatch, logOutput.stateDiffHash, blobCommitments, blobHashes);
bytes32 commitment = _createBatchCommitment(
_newBatch,
logOutput.stateDiffHash,
blobCommitments,
logOutput.blobHashes
);

return
StoredBatchInfo({
@@ -130,6 +134,8 @@ contract ExecutorFacet is ZkSyncStateTransitionBase, IExecutor {
// Copy L2 to L1 logs into memory.
bytes memory emittedL2Logs = _newBatch.systemLogs;

logOutput.blobHashes = new bytes32[](MAX_NUMBER_OF_BLOBS);

// Used as bitmap to set/check log processing happens exactly once.
// See SystemLogKey enum in Constants.sol for ordering.
uint256 processedLogs;
@@ -167,6 +173,18 @@ contract ExecutorFacet is ZkSyncStateTransitionBase, IExecutor {
} else if (logKey == uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY)) {
require(logSender == L2_BOOTLOADER_ADDRESS, "bk");
logOutput.numberOfLayer1Txs = uint256(logValue);
} else if (
logKey >= uint256(SystemLogKey.BLOB_ONE_HASH_KEY) &&
logKey < uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY)
) {
require(logSender == L2_PUBDATA_CHUNK_PUBLISHER_ADDR, "pc");
uint8 blobNumber = uint8(logKey) - uint8(SystemLogKey.BLOB_ONE_HASH_KEY);

// Although Solidity's built-in array bounds checking already guarantees that `blobNumber`
// is a valid index, we double check it explicitly in case this loop is ever wrapped in
// `unchecked`, and to produce a clearer error message.
require(blobNumber < MAX_NUMBER_OF_BLOBS, "b6");
logOutput.blobHashes[blobNumber] = logValue;
} else if (logKey == uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY)) {
require(logSender == L2_BOOTLOADER_ADDRESS, "bu");
require(_expectedSystemContractUpgradeTxHash == logValue, "ut");
@@ -185,43 +203,6 @@ contract ExecutorFacet is ZkSyncStateTransitionBase, IExecutor {
}
}

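The new branch above replaces the second pass over the system logs that the removed `_processBlobLogs` helper (below) used to make. The following sketch isolates the key-to-index mapping it performs; the reduced enum and its concrete values are assumptions for illustration, and only the relative ordering of the keys matters.

```solidity
// SPDX-License-Identifier: MIT
pragma solidity 0.8.24;

/// Illustrative only: the enum is reduced to the keys relevant here and its
/// ordering/values are assumptions, not a copy of Constants.sol.
contract BlobLogIndexSketch {
    enum SystemLogKey {
        BLOB_ONE_HASH_KEY,
        BLOB_TWO_HASH_KEY,
        BLOB_THREE_HASH_KEY,
        BLOB_FOUR_HASH_KEY,
        BLOB_FIVE_HASH_KEY,
        BLOB_SIX_HASH_KEY,
        EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY // assumed to sit right after the blob keys
    }

    // The removed _processBlobLogs comment ("We have 6 logs ...") implies six blob slots.
    uint256 internal constant MAX_NUMBER_OF_BLOBS = 6;

    /// Mirrors the new branch in _processL2Logs: a key inside the blob range becomes
    /// an index into logOutput.blobHashes.
    function blobIndexForLogKey(uint256 logKey) public pure returns (uint256 blobNumber) {
        require(
            logKey >= uint256(SystemLogKey.BLOB_ONE_HASH_KEY) &&
                logKey < uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY),
            "not a blob hash log"
        );
        blobNumber = logKey - uint256(SystemLogKey.BLOB_ONE_HASH_KEY);
        // Redundant with the bounds check on blobHashes; kept for a clearer error ("b6" in the diff).
        require(blobNumber < MAX_NUMBER_OF_BLOBS, "b6");
    }
}
```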
/// @dev Check that L2 blob logs are proper and batch contain all information for them
/// @dev The logs processed here should line up such that only one log for each key from the
/// SystemLogKey enum in Constants.sol is processed per new batch.
function _processBlobLogs(CommitBatchInfo calldata _newBatch) internal pure returns (bytes32[] memory blobHashes) {
// Copy L2 to L1 logs into memory.
bytes memory emittedL2Logs = _newBatch.systemLogs;

// Used as bitmap to set/check log processing happens exactly once.
// See SystemLogKey enum in Constants.sol for ordering.
uint256 processedLogs;

blobHashes = new bytes32[](MAX_NUMBER_OF_BLOBS);

// linear traversal of the logs
for (uint256 i = 0; i < emittedL2Logs.length; i = i.uncheckedAdd(L2_TO_L1_LOG_SERIALIZE_SIZE)) {
// Extract the values to be compared to/used such as the log sender, key, and value
(address logSender, ) = UnsafeBytes.readAddress(emittedL2Logs, i + L2_LOG_ADDRESS_OFFSET);
(uint256 logKey, ) = UnsafeBytes.readUint256(emittedL2Logs, i + L2_LOG_KEY_OFFSET);
(bytes32 logValue, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + L2_LOG_VALUE_OFFSET);

if (
logKey >= uint256(SystemLogKey.BLOB_ONE_HASH_KEY) &&
logKey != uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY)
) {
uint8 key = uint8(logKey) - uint8(SystemLogKey.BLOB_ONE_HASH_KEY);
// Ensure that the log hasn't been processed already
require(!_checkBit(processedLogs, key), "pk");
processedLogs = _setBit(processedLogs, key);

require(logSender == L2_PUBDATA_CHUNK_PUBLISHER_ADDR, "pc");
blobHashes[logKey - uint256(SystemLogKey.BLOB_ONE_HASH_KEY)] = logValue;
}
}
// We have 6 logs so that corresponds to 2^6 - 1 = 63
require(processedLogs == 2 ** MAX_NUMBER_OF_BLOBS - 1, "l8");
}

/// @inheritdoc IExecutor
function commitBatches(
StoredBatchInfo memory _lastCommittedBatchData,
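For reference, the 144-byte pubdata commitment layout described in the Blob branch earlier in this file (opening point 16 bytes || claimed value 32 bytes || commitment 48 bytes || proof 48 bytes) can be sliced as in the following hypothetical helper; it is not part of the diff, only an illustration of the byte offsets implied by that comment.

```solidity
// SPDX-License-Identifier: MIT
pragma solidity 0.8.24;

/// Hypothetical helper splitting one 144-byte pubdata commitment entry into its fields.
contract PubdataCommitmentLayoutSketch {
    // 16 + 32 + 48 + 48 = 144 bytes per blob commitment.
    uint256 internal constant PUBDATA_COMMITMENT_SIZE = 144;

    function split(bytes calldata _commitment)
        external
        pure
        returns (bytes16 openingPoint, bytes32 claimedValue, bytes memory kzgCommitment, bytes memory kzgProof)
    {
        require(_commitment.length == PUBDATA_COMMITMENT_SIZE, "bad commitment length");
        openingPoint = bytes16(_commitment[0:16]); // 16-byte opening point
        claimedValue = bytes32(_commitment[16:48]); // 32-byte claimed value
        kzgCommitment = _commitment[48:96]; // 48-byte KZG commitment
        kzgProof = _commitment[96:144]; // 48-byte KZG proof
    }
}
```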
@@ -36,10 +36,13 @@ struct LogProcessingOutput {
bytes32 stateDiffHash;
bytes32 l2LogsTreeRoot;
uint256 packedBatchAndL2BlockTimestamp;
bytes32[] blobHashes;
}

/// @dev The maximum number of bytes that calldata should be.
uint256 constant MAX_CALLDATA_SIZE = 128_000;
/// @dev Total number of bytes in a blob. Blob = 4096 field elements * 31 bytes per field element
/// @dev EIP-4844 defines it as 131_072, but we use 4096 * 31 within our circuits to always fit within a field element
/// @dev Our circuits will prove that an EIP-4844 blob and our internal blob are the same.
uint256 constant BLOB_SIZE_BYTES = 126_976;

/// @dev Offset used to pull Address From Log. Equal to 4 (bytes for isService)
uint256 constant L2_LOG_ADDRESS_OFFSET = 4;
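The arithmetic behind the new constant, written out as a small sketch; every name other than `BLOB_SIZE_BYTES` is illustrative rather than taken from the codebase.

```solidity
// SPDX-License-Identifier: MIT
pragma solidity 0.8.24;

// Illustrative derivation of BLOB_SIZE_BYTES; only BLOB_SIZE_BYTES itself appears in the diff.
uint256 constant FIELD_ELEMENTS_PER_BLOB = 4096;
// One byte per element is left unused so the value always fits below the BLS field modulus.
uint256 constant USABLE_BYTES_PER_FIELD_ELEMENT = 31;
uint256 constant BLOB_SIZE_BYTES = FIELD_ELEMENTS_PER_BLOB * USABLE_BYTES_PER_FIELD_ELEMENT; // = 126_976

// Raw blob size defined by EIP-4844: 4096 field elements * 32 bytes each.
uint256 constant EIP4844_BLOB_SIZE_BYTES = 4096 * 32; // = 131_072
```

Note that the Calldata branch in the Executor now reuses `BLOB_SIZE_BYTES` as the pubdata length cap, replacing the removed `MAX_CALLDATA_SIZE = 128_000`.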
32 changes: 16 additions & 16 deletions l1-contracts/test/test_config/constant/hardhat.json
@@ -3,96 +3,96 @@
"name": "DAI",
"symbol": "DAI",
"decimals": 18,
"address": "0x0F3934f734e62Daa87715B3E4640eFDB89246571"
"address": "0x212EC7200E3C37084B24e8cD5588CDCf77560365"
},
{
"name": "wBTC",
"symbol": "wBTC",
"decimals": 8,
"address": "0x421e18B8c90Fe0e4AA6f049d6D5fdbA3093b698C"
"address": "0xdF13C5fE82D6747Af13a1986BA85d641EF522055"
},
{
"name": "BAT",
"symbol": "BAT",
"decimals": 18,
"address": "0x94f0779546Ed463dABBa2f71aB10410e9C8A839D"
"address": "0xb03CA6F0A9d76f9b443b4C98E853719d2AaBf840"
},
{
"name": "GNT",
"symbol": "GNT",
"decimals": 18,
"address": "0xE3D1be6c892D4f7b09c3940b67741104c2FFB34b"
"address": "0xdE598699a5f81F5D34e11E59Bd29E4f313d3640A"
},
{
"name": "MLTT",
"symbol": "MLTT",
"decimals": 18,
"address": "0x632655228439172CC496A17c0ceA170917FebE18"
"address": "0x11Ac20D616c43Ac69263EDCCc43fFB895b637d30"
},
{
"name": "DAIK",
"symbol": "DAIK",
"decimals": 18,
"address": "0x211e762c89e588AfB3ec50b0017Cd5250bBb111A"
"address": "0x6DcAB7Fd57E5f9FCDFcfB4338f519afa0F628704"
},
{
"name": "wBTCK",
"symbol": "wBTCK",
"decimals": 8,
"address": "0xEFc9af2B75706c223C1Af2e4dEaBDfeE46f8Bf85"
"address": "0x63f98422F80cf92bb2A3B68EE5826CED60956893"
},
{
"name": "BATK",
"symbol": "BATS",
"decimals": 18,
"address": "0x41a0577764aDdc7B86149884324e1E53A5EE82C6"
"address": "0x1Cb210B8aC9bF17502dA63c147b5C1C103916Dc1"
},
{
"name": "GNTK",
"symbol": "GNTS",
"decimals": 18,
"address": "0xc21042CA11b84072229b91750805Fd611290ecF8"
"address": "0x0c93A66EcBD53F8b5025c5A47b36F25e70Bec872"
},
{
"name": "MLTTK",
"symbol": "MLTTS",
"decimals": 18,
"address": "0xe1630b33Dcad7d2376FDA96cd19F2A3A633E60ce"
"address": "0xD6dE83bdc21C4EFE1832976cF4Daa5f0F6d156bE"
},
{
"name": "DAIL",
"symbol": "DAIL",
"decimals": 18,
"address": "0xE982e1ff2d8972B09aCF7728E8b5e23149bd74eb"
"address": "0x3c138Dd13EC52C1Ea7e73c8DD16EF6D0593bDcf3"
},
{
"name": "wBTCL",
"symbol": "wBTCP",
"decimals": 8,
"address": "0x6ca4A813AcAcfF0CA91746a759D0ffD4420eF160"
"address": "0x76258E6b9F71d45BCeAA0CC6514B910d7D8aC807"
},
{
"name": "BATL",
"symbol": "BATW",
"decimals": 18,
"address": "0xA16cABea54068663d9295Fc63093724ed97a1aF7"
"address": "0xc7fc371d3340337407bDfE48e2113085DD89FFe2"
},
{
"name": "GNTL",
"symbol": "GNTW",
"decimals": 18,
"address": "0x11da4a6F691E170439E88505d45f9800751e6AAd"
"address": "0xAAFc93A3E06857a40d82fB61F038288dCDfcE7eF"
},
{
"name": "MLTTL",
"symbol": "MLTTW",
"decimals": 18,
"address": "0x27D1cd196aB20961c6345a4FB2715C98856Aa2c6"
"address": "0x0F3934f734e62Daa87715B3E4640eFDB89246571"
},
{
"name": "Wrapped Ether",
"symbol": "WETH",
"decimals": 18,
"address": "0xa578622084d27E5c9D1590556ac9504DcC0503a2"
"address": "0x421e18B8c90Fe0e4AA6f049d6D5fdbA3093b698C"
}
]
