From 7044e58e5062610bfbeecd6be417825fd763f05e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jorge=20P=C3=A9rez?= Date: Wed, 17 Jan 2024 15:26:30 +0100 Subject: [PATCH] Test Polygon --- .gitignore | 3 +- docker-compose.yml | 2 +- migrations/1705420603000-NewBackfillTables.ts | 25 ++++ ...870349094-AddV4CancelAndMultiplexEvents.ts | 118 ++++++--------- src/abis.ts | 25 ++++ src/config.ts | 13 +- src/constants.ts | 7 +- src/data_sources/events/web3.ts | 41 ++++-- src/entities/index.ts | 1 - src/entities/token_transfer_event.ts | 16 -- src/events.ts | 41 ++++-- src/index.ts | 32 ++-- src/ormconfig.ts | 22 ++- src/parsers/events/cancel_events.ts | 36 +++-- src/parsers/events/token_transfer_events.ts | 11 -- src/parsers/index.ts | 2 +- src/scripts/backfill_events.ts | 139 +++++++++--------- src/scripts/monitor_current_block.ts | 8 +- src/scripts/pull_and_save_block_events.ts | 123 +++++++++++++--- src/scripts/pull_and_save_events_by_topic.ts | 2 +- src/scripts/pull_and_save_legacy_events.ts | 25 ---- src/scripts/utils/event_abi_utils.ts | 6 +- src/scripts/utils/web3_utils.ts | 4 +- src/tokenMetadataSingleton.ts | 2 +- 24 files changed, 397 insertions(+), 307 deletions(-) create mode 100644 migrations/1705420603000-NewBackfillTables.ts delete mode 100644 src/entities/token_transfer_event.ts delete mode 100644 src/parsers/events/token_transfer_events.ts diff --git a/.gitignore b/.gitignore index 60edadad..1be707af 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ -.env +.env* +!.env.example mounts/ node_modules lib diff --git a/docker-compose.yml b/docker-compose.yml index 7ba6dcf9..30b02bee 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -118,7 +118,7 @@ services: dockerfile: Dockerfile restart: always environment: - EVM_RPC_URL: '${RPC_URL_POLYGON}' + EVM_RPC_URL: '${EVM_RPC_URL_POLYGON}' CHAIN_ID: '137' POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}' SCHEMA: 'events_polygon' diff --git 
a/migrations/1705420603000-NewBackfillTables.ts b/migrations/1705420603000-NewBackfillTables.ts new file mode 100644 index 00000000..70d5bb41 --- /dev/null +++ b/migrations/1705420603000-NewBackfillTables.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner, getConnection } from 'typeorm'; + +export class NewBackfillTables1705420603000 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const connection = getConnection(); + const { schema } = connection.options as any; + await queryRunner.query(` + ALTER TABLE ${schema}.events_backfill RENAME TO backfill_events; + + CREATE TABLE ${schema}.backfill_blocks ( + block_number BIGINT NOT NULL, + PRIMARY KEY (block_number) + ); + `); + } + + public async down(queryRunner: QueryRunner): Promise { + const connection = getConnection(); + const { schema } = connection.options as any; + await queryRunner.query(` + DROP TABLE ${schema}.backfill_blocks; + ALTER TABLE ${schema}.backfill_events RENAME TO events_backfill; + `); + } +} diff --git a/migrations/ethereum/1615870349094-AddV4CancelAndMultiplexEvents.ts b/migrations/ethereum/1615870349094-AddV4CancelAndMultiplexEvents.ts index c06874ad..7953c968 100644 --- a/migrations/ethereum/1615870349094-AddV4CancelAndMultiplexEvents.ts +++ b/migrations/ethereum/1615870349094-AddV4CancelAndMultiplexEvents.ts @@ -1,81 +1,53 @@ -import { MigrationInterface, QueryRunner, Table } from 'typeorm'; - -const eventsV4CancelEvents = new Table({ - name: 'events.v4_cancel_events', - columns: [ - { name: 'observed_timestamp', type: 'bigint' }, - { name: 'contract_address', type: 'varchar' }, - { name: 'transaction_hash', type: 'varchar', isPrimary: true }, - { name: 'transaction_index', type: 'bigint' }, - { name: 'log_index', type: 'bigint', isPrimary: true }, - { name: 'block_hash', type: 'varchar' }, - { name: 'block_number', type: 'bigint' }, - - { name: 'maker', type: 'varchar' }, - { name: 'order_hash', type: 'varchar' }, - ], -}); - -const 
eventsExpiredRfqOrderEvents = new Table({ - name: 'events.expired_rfq_order_events', - columns: [ - { name: 'observed_timestamp', type: 'bigint' }, - { name: 'contract_address', type: 'varchar' }, - { name: 'transaction_hash', type: 'varchar', isPrimary: true }, - { name: 'transaction_index', type: 'bigint' }, - { name: 'log_index', type: 'bigint', isPrimary: true }, - { name: 'block_hash', type: 'varchar' }, - { name: 'block_number', type: 'bigint' }, - - { name: 'maker', type: 'varchar' }, - { name: 'order_hash', type: 'varchar' }, - { name: 'expiry', type: 'numeric' }, - ], -}); - -const indexQuery = ` - - CREATE INDEX v4_cancel_events_transaction_hash_index - ON events.v4_cancel_events (transaction_hash); - CREATE INDEX v4_cancel_events_block_number_index - ON events.v4_cancel_events (block_number); - CREATE INDEX v4_cancel_events_maker_index - ON events.v4_cancel_events (maker); - CREATE INDEX v4_cancel_events_order_hash_index - ON events.v4_cancel_events (order_hash); - - - CREATE INDEX expired_rfq_events_block_number_index - ON events.expired_rfq_order_events (block_number); - CREATE INDEX expired_rfq_events_transaction_hash_index - ON events.expired_rfq_order_events (transaction_hash); - CREATE INDEX expired_rfq_events_maker_index - ON events.expired_rfq_order_events (maker); - CREATE INDEX expired_rfq_events_order_hash_index - ON events.v4_cancel_events (order_hash); -`; - -const dropIndexQuery = ` - DROP INDEX events.v4_cancel_events_transaction_hash_index; - DROP INDEX events.v4_cancel_events_block_number_index; - DROP INDEX events.v4_cancel_events_maker_index; - DROP INDEX events.v4_cancel_events_order_hash_index; - DROP INDEX events.expired_rfq_events_block_number_index; - DROP INDEX events.expired_rfq_events_transaction_hash_index; - DROP INDEX events.expired_rfq_events_maker_index; - DROP INDEX events.expired_rfq_events_order_hash_index; -`; +import { MigrationInterface, QueryRunner, getConnection } from 'typeorm'; export class 
AddV4CancelAndMultiplexEvents1615870349094 implements MigrationInterface { public async up(queryRunner: QueryRunner): Promise { - await queryRunner.createTable(eventsV4CancelEvents); - await queryRunner.createTable(eventsExpiredRfqOrderEvents); - await queryRunner.query(indexQuery); + const connection = getConnection(); + const { schema } = connection.options as any; + await queryRunner.query(` + CREATE TABLE ${schema}.v4_cancel_events ( + observed_timestamp int8 NOT NULL, + contract_address varchar NOT NULL, + transaction_hash varchar NOT NULL, + transaction_index int8 NOT NULL, + log_index int8 NOT NULL, + block_hash varchar NOT NULL, + block_number int8 NOT NULL, + maker varchar NOT NULL, + order_hash varchar NOT NULL, + CONSTRAINT v4_cancel_events_pk PRIMARY KEY (transaction_hash, log_index) + ); + CREATE INDEX v4_xancel_events_order_hash_index ON ${schema}.v4_cancel_events USING btree (order_hash); + CREATE INDEX v4_cancel_events_block_number_index ON ${schema}.v4_cancel_events USING btree (block_number); + CREATE INDEX v4_cancel_events_maker_index ON ${schema}.v4_cancel_events USING btree (maker); + CREATE INDEX v4_cancel_events_order_hash_index ON ${schema}.v4_cancel_events USING btree (order_hash); + CREATE INDEX v4_cancel_events_transaction_hash_index ON ${schema}.v4_cancel_events USING btree (transaction_hash); + + CREATE TABLE ${schema}.expired_rfq_order_events ( + observed_timestamp int8 NOT NULL, + contract_address varchar NOT NULL, + transaction_hash varchar NOT NULL, + transaction_index int8 NOT NULL, + log_index int8 NOT NULL, + block_hash varchar NOT NULL, + block_number int8 NOT NULL, + maker varchar NOT NULL, + order_hash varchar NOT NULL, + expiry numeric NOT NULL, + CONSTRAINT expired_rfq_order_events_pk PRIMARY KEY (transaction_hash, log_index) + ); + CREATE INDEX expired_rfq_events_block_number_index ON ${schema}.expired_rfq_order_events USING btree (block_number); + CREATE INDEX expired_rfq_events_maker_index ON 
${schema}.expired_rfq_order_events USING btree (maker); + CREATE INDEX expired_rfq_events_transaction_hash_index ON ${schema}.expired_rfq_order_events USING btree (transaction_hash); +`); } public async down(queryRunner: QueryRunner): Promise { - await queryRunner.query(dropIndexQuery); - await queryRunner.dropTable(eventsExpiredRfqOrderEvents); - await queryRunner.dropTable(eventsV4CancelEvents); + const connection = getConnection(); + const { schema } = connection.options as any; + await queryRunner.query(` + DROP TABLE ${schema}.v4_cancel_events; + DROP TABLE ${schema}.expired_rfq_order_events; + `); } } diff --git a/src/abis.ts b/src/abis.ts index de45cfa5..4a447925 100644 --- a/src/abis.ts +++ b/src/abis.ts @@ -24,6 +24,31 @@ export const EXPIRED_RFQ_ORDER_ABI = { type: 'event', }; +export const V3_CANCEL_ABI = { + anonymous: false, + inputs: [ + { indexed: true, internalType: 'address', name: 'makerAddress', type: 'address' }, + { indexed: true, internalType: 'address', name: 'feeRecipientAddress', type: 'address' }, + { indexed: false, internalType: 'bytes', name: 'makerAssetData', type: 'bytes' }, + { indexed: false, internalType: 'bytes', name: 'takerAssetData', type: 'bytes' }, + { indexed: false, internalType: 'address', name: 'senderAddress', type: 'address' }, + { indexed: true, internalType: 'bytes32', name: 'orderHash', type: 'bytes32' }, + ], + name: 'Cancel', + type: 'event', +}; + +export const V3_CANCEL_UP_TO_ABI = { + anonymous: false, + inputs: [ + { indexed: true, internalType: 'address', name: 'makerAddress', type: 'address' }, + { indexed: true, internalType: 'address', name: 'orderSenderAddress', type: 'address' }, + { indexed: false, internalType: 'uint256', name: 'orderEpoch', type: 'uint256' }, + ], + name: 'CancelUpTo', + type: 'event', +}; + export const V4_CANCEL_ABI = { anonymous: false, inputs: [ diff --git a/src/config.ts b/src/config.ts index c7b91bb2..70e1d610 100644 --- a/src/config.ts +++ b/src/config.ts @@ -24,6 +24,7 @@ 
import { DEFAULT_FEAT_UNISWAP_V3_POOL_CREATED_EVENT, DEFAULT_FEAT_UNISWAP_V3_SWAP_EVENT, DEFAULT_FEAT_UNISWAP_V3_VIP_SWAP_EVENT, + DEFAULT_FEAT_V3_CANCEL_EVENTS, DEFAULT_FEAT_V3_FILL_EVENT, DEFAULT_FEAT_V3_NATIVE_FILL, DEFAULT_FEAT_WRAP_UNWRAP_NATIVE_EVENT, @@ -68,6 +69,8 @@ interface BridgeContract { startingBlock: number; } +export const RESCRAPE_BLOCKS = 10; //TODO: delete me + const bridgeContracts = [ { contract: '0x1c29670f7a77f1052d30813a0a4f632c78a02610', startingBlock: 9613431 }, { contract: '0x991c745401d5b5e469b8c3e2cb02c748f08754f1', startingBlock: 9613441 }, @@ -269,6 +272,7 @@ validateAddress( ); export const FEAT_V3_FILL_EVENT = getBoolConfig('FEAT_V3_FILL_EVENT', DEFAULT_FEAT_V3_FILL_EVENT); +export const FEAT_V3_CANCEL_EVENTS = getBoolConfig('FEAT_V3_CANCEL_EVENTS', DEFAULT_FEAT_V3_CANCEL_EVENTS); export const FEAT_OTC_ORDERS = getBoolConfig('FEAT_OTC_ORDERS', DEFAULT_FEAT_OTC_ORDERS); @@ -370,15 +374,6 @@ export const FEAT_TOKENS_FROM_TRANSFERS = getBoolConfig( DEFAULT_FEAT_TOKENS_FROM_TRANSFERS, ); -export const TOKENS_FROM_TRANSFERS_START_BLOCK = getIntConfig('TOKENS_FROM_TRANSFERS_START_BLOCK', -1); - -validateStartBlock( - 'TOKENS_FROM_TRANSFERS_START_BLOCK', - TOKENS_FROM_TRANSFERS_START_BLOCK, - 'FEAT_TOKENS_FROM_TRANSFERS', - FEAT_TOKENS_FROM_TRANSFERS, -); - export const FEAT_META_TRANSACTION_EXECUTED_EVENT = getBoolConfig( 'FEAT_META_TRANSACTION_EXECUTED_EVENT', DEFAULT_FEAT_META_TRANSACTION_EXECUTED_EVENT, diff --git a/src/constants.ts b/src/constants.ts index 04c4edd0..4514f00e 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -33,6 +33,7 @@ export const DEFAULT_FEAT_UNISWAP_V2_VIP_SWAP_EVENT = false; export const DEFAULT_FEAT_UNISWAP_V3_VIP_SWAP_EVENT = false; export const DEFAULT_FEAT_UNISWAP_V3_SWAP_EVENT = false; export const DEFAULT_FEAT_UNISWAP_V3_POOL_CREATED_EVENT = false; +export const DEFAULT_FEAT_V3_CANCEL_EVENTS = false; export const DEFAULT_FEAT_V3_FILL_EVENT = false; export const DEFAULT_FEAT_V3_NATIVE_FILL = false; 
export const DEFAULT_FEAT_VIP_SWAP_EVENT = false; @@ -59,6 +60,7 @@ export const UNISWAP_V2_PAIR_CREATED_TOPIC = ['0x0d3648bd0f6ba80134a33ba9275ac58 export const UNISWAP_V2_SYNC_TOPIC = ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1']; export const V3_EXCHANGE_ADDRESS = '0x61935cbdd02287b511119ddb11aeb42f1593b7ef'; +export const V3_DEPLOYMENT_BLOCK = 8952139; export const UNISWAP_V2_SWAP_EVENT_TOPIC_0 = '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822'; export const UNISWAP_V3_SWAP_EVENT_TOPIC_0 = '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67'; @@ -81,8 +83,6 @@ export const ERC1155_ORDER_CANCELLED_EVENT_TOPIC = [ '0x81b6de71b4c5058b59a7b56dc73297dd4820029a7229cf7b8e9680d73ff9bab0', ]; -export const TOKEN_TRANSFER_EVENT_TOPIC = ['0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef']; - export const ERC165_SUPPORTS_INTERFACE_SELECTOR = '01ffc9a7'; export const ERC165_ERC721_INTERFACE = '80ac58cd'; export const ERC165_ERC1155_INTERFACE = 'd9b67a26'; @@ -113,3 +113,6 @@ export const UNWRAP_NATIVE_EVENT_TOPIC = ['0x7fcf532c15f0a6db0bd6d0e038bea71d30d export const TRANSFER_EVENT_TOPIC_0 = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'; export const ZEROEX_API_AFFILIATE_SELECTOR = '869584cd'; + +export const V3_CANCEL_EVENT_TOPIC = ['0x02c310a9a43963ff31a754a4099cc435ed498049687539d72d7818d9b093415c']; +export const V3_CANCEL_UP_TO_EVENT_TOPIC = ['0x82af639571738f4ebd4268fb0363d8957ebe1bbb9e78dba5ebd69eed39b154f0']; diff --git a/src/data_sources/events/web3.ts b/src/data_sources/events/web3.ts index 0af9d799..e707f1c8 100644 --- a/src/data_sources/events/web3.ts +++ b/src/data_sources/events/web3.ts @@ -16,7 +16,7 @@ const helpers = require('web3-core-helpers'); const formatter = helpers.formatters; export interface LogPullInfo { - address: string; + address: string | null; fromBlock: number; toBlock: number; topics: (string | null)[]; @@ -62,18 +62,36 @@ export class 
Web3Source { }); } + public async getBatchBlockInfoForRangeAsync( + startBlock: number, + endBlock: number, + includeTransactions: B, + ): Promise; + public async getBatchBlockInfoForRangeAsync( startBlock: number, endBlock: number, includeTransactions: boolean, - ): Promise { - const iter = Array.from(Array(endBlock - startBlock + 1).keys()); + ): Promise<(BlockWithoutTransactionData | BlockWithTransactionData1559)[]> { + const blockNumbers = Array.from(Array(endBlock - startBlock + 1).keys()).map((i) => i + startBlock); + return this.getBatchBlockInfoAsync(blockNumbers, includeTransactions); + } + + public async getBatchBlockInfoAsync( + blockNumbers: number[], + includeTransactions: B, + ): Promise; + + public async getBatchBlockInfoAsync( + blockNumbers: number[], + includeTransactions: boolean, + ): Promise<(BlockWithoutTransactionData | BlockWithTransactionData1559)[]> { const batch = new this._web3.BatchRequest(); - const promises = iter.map((i) => { - return new Promise((resolve, reject) => { + const promises = blockNumbers.map((blockNumber) => { + return new Promise((resolve, reject) => { const req = this._web3.eth.getBlock.request( - i + startBlock, + blockNumber, includeTransactions, (err: any, data: BlockWithTransactionData1559) => { if (err) { @@ -97,17 +115,20 @@ export class Web3Source { startBlock: number, endBlock: number, ): Promise { - const iter = Array.from(Array(endBlock - startBlock + 1).keys()); + const blockNumbers = Array.from(Array(endBlock - startBlock + 1).keys()).map((i) => i + startBlock); + return this.getBatchBlockReceiptsAsync(blockNumbers); + } - const promises = iter.map((i) => { - return this._web3.eth.getBlockReceipts(i + startBlock).catch((err: any) => { + public async getBatchBlockReceiptsAsync(blockNumbers: number[]): Promise { + const promises = blockNumbers.map((blockNumber) => { + return this._web3.eth.getBlockReceipts(blockNumber).catch((err: any) => { logger.error(`Blocks error: ${err}`); }); }); const blocks = await 
Promise.all(promises); - return blocks as TransactionReceipt1559[][]; + return blocks; } public async getBatchTxInfoAsync(hashes: string[]): Promise { diff --git a/src/entities/index.ts b/src/entities/index.ts index 2152a8db..192499de 100644 --- a/src/entities/index.ts +++ b/src/entities/index.ts @@ -35,7 +35,6 @@ export { StakingPoolMetadata } from './staking_pool_metadata'; export { StakingProxyDeployment } from './staking_proxy_deployment'; export { TokenMetadata } from './token_metadata'; export { TokenRegistry } from './token_registry'; -export { TokenTransferEvent } from './token_transfer_event'; export { Transaction } from './transaction'; export { TransactionExecutionEvent } from './transaction_execution_event'; export { TransactionLogs } from './transaction_log'; diff --git a/src/entities/token_transfer_event.ts b/src/entities/token_transfer_event.ts deleted file mode 100644 index 50d9cbc6..00000000 --- a/src/entities/token_transfer_event.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { BigNumber } from '@0x/utils'; -import { Column, Entity } from 'typeorm'; - -import { Event } from './event'; -import { bigNumberTransformer } from '../transformers'; - -// THis event is not save to the DB, just used to detect new tokens -@Entity({ name: 'none' }) -export class TokenTransferEvent extends Event { - @Column({ name: 'from', type: 'varchar' }) - public from!: string; - @Column({ name: 'to', type: 'varchar' }) - public to!: string; - @Column({ name: 'value', type: 'numeric', transformer: bigNumberTransformer }) - public value!: BigNumber; -} diff --git a/src/events.ts b/src/events.ts index 3e679ffe..a36facb4 100644 --- a/src/events.ts +++ b/src/events.ts @@ -11,7 +11,6 @@ import { FEAT_POLYGON_RFQM_PAYMENTS, FEAT_RFQ_EVENT, FEAT_SOCKET_BRIDGE_EVENT, - FEAT_TOKENS_FROM_TRANSFERS, FEAT_TRANSFORMED_ERC20_EVENT, FEAT_UNISWAP_V2_PAIR_CREATED_EVENT, FEAT_UNISWAP_V2_SYNC_EVENT, @@ -19,6 +18,7 @@ import { FEAT_UNISWAP_V3_POOL_CREATED_EVENT, FEAT_UNISWAP_V3_SWAP_EVENT, 
FEAT_UNISWAP_V3_VIP_SWAP_EVENT, + FEAT_V3_CANCEL_EVENTS, FEAT_V3_FILL_EVENT, FEAT_V3_NATIVE_FILL, FEAT_WRAP_UNWRAP_NATIVE_EVENT, @@ -67,7 +67,6 @@ import { PROTOCOL_ZEROEX_TIMELOCK_CONTRACT_ADDRESS, RFQ_ORDER_FILLED_EVENT_TOPIC, SOCKET_BRIDGE_EVENT_TOPIC, - TOKEN_TRANSFER_EVENT_TOPIC, TRANSFER_EVENT_TOPIC_0, TRANSFORMEDERC20_EVENT_TOPIC, TREASURY_ZEROEX_TIMELOCK_CONTRACT_ADDRESS, @@ -78,7 +77,10 @@ import { UNISWAP_V3_SWAP_EVENT_TOPIC_0, UNWRAP_NATIVE_EVENT_TOPIC, V3_EXCHANGE_ADDRESS, + V3_DEPLOYMENT_BLOCK, V3_FILL_EVENT_TOPIC, + V3_CANCEL_EVENT_TOPIC, + V3_CANCEL_UP_TO_EVENT_TOPIC, V4_CANCEL_EVENT_TOPIC, WRAP_NATIVE_EVENT_TOPIC, ZEROEX_PROTOCOL_GOVERNOR_CONTRACT_ADDRESS, @@ -97,6 +99,8 @@ import { ExpiredRfqOrderEvent, FillEvent, LogTransferEvent, + CancelEvent, + CancelUpToEvent, MetaTransactionExecutedEvent, NativeFill, OnchainGovernanceCallScheduledEvent, @@ -131,6 +135,8 @@ import { parseErc721OrderPresignedEvent, parseExpiredRfqOrderEvent, parseFillEvent, + parseCancelEvent, + parseCancelUpToEvent, parseLiquidityProviderSwapEvent, parseLogTransferEvent, parseMetaTransactionExecutedEvent, @@ -142,7 +148,6 @@ import { parseOnchainGovernanceProposalCreatedEvent, parseOtcOrderFilledEvent, parseSocketBridgeEvent, - parseTokenTransfer, parseTransformedERC20Event, parseUniswapV2PairCreatedEvent, parseUniswapV2SwapEvent, @@ -188,7 +193,7 @@ export type EventScraperProps = { enabled: boolean; name: string; tType: any; - table: string | null; + table: string; topics: (string | null)[]; contractAddress: string | null; startBlock: number; @@ -572,16 +577,26 @@ export const eventScrperProps: EventScraperProps[] = [ filterFunctionGetContext: filterSocketBridgeEventsGetContext, filterFunction: filterSocketBridgeEvents, }, + { - enabled: FEAT_TOKENS_FROM_TRANSFERS, - name: 'TokenTransferEvent', - tType: null, - table: null, - topics: TOKEN_TRANSFER_EVENT_TOPIC, - contractAddress: null, - startBlock: 0, - parser: parseTokenTransfer, - postProcess: saveTokens, + enabled: 
FEAT_V3_CANCEL_EVENTS, + name: 'CancelEvent', + tType: CancelEvent, + table: 'cancel_events', + topics: V3_CANCEL_EVENT_TOPIC, + contractAddress: V3_EXCHANGE_ADDRESS, + startBlock: V3_DEPLOYMENT_BLOCK, + parser: parseCancelEvent, + }, + { + enabled: FEAT_V3_CANCEL_EVENTS, + name: 'CancelUpToEvent', + tType: CancelUpToEvent, + table: 'cancel_up_to_events', + topics: V3_CANCEL_UP_TO_EVENT_TOPIC, + contractAddress: V3_EXCHANGE_ADDRESS, + startBlock: V3_DEPLOYMENT_BLOCK, + parser: parseCancelUpToEvent, }, ]; diff --git a/src/index.ts b/src/index.ts index 9b568444..d82dd20a 100644 --- a/src/index.ts +++ b/src/index.ts @@ -2,7 +2,6 @@ import { CHAIN_ID, ENABLE_PROMETHEUS_METRICS, - FEAT_TOKENS_FROM_TRANSFERS, FEAT_UNISWAP_V2_PAIR_CREATED_EVENT, FEAT_UNISWAP_V3_POOL_CREATED_EVENT, KAFKA_AUTH_PASSWORD, @@ -14,13 +13,13 @@ import { import * as ormConfig from './ormconfig'; import { EventsBackfillScraper } from './scripts/backfill_events'; import { ChainIdChecker } from './scripts/check_chain_id'; +import { CurrentBlockMonitor } from './scripts/monitor_current_block'; import { BackfillTxScraper } from './scripts/pull_and_save_backfill_tx'; import { BlockEventsScraper } from './scripts/pull_and_save_block_events'; import { BlockScraper } from './scripts/pull_and_save_blocks'; import { EventsByTopicScraper } from './scripts/pull_and_save_events_by_topic'; import { LegacyEventScraper } from './scripts/pull_and_save_legacy_events'; import { TokensFromBackfill } from './scripts/pull_and_save_tokens_backfill'; -import { TokensFromTransfersScraper } from './scripts/pull_and_save_tokens_from_transfers'; import { TokenMetadataSingleton } from './tokenMetadataSingleton'; import { UniV2PoolSingleton } from './uniV2PoolSingleton'; import { UniV3PoolSingleton } from './uniV3PoolSingleton'; @@ -62,14 +61,15 @@ const blockScraper = new BlockScraper(); const eventsByTopicScraper = new EventsByTopicScraper(); const eventsBackfillScraper = new EventsBackfillScraper(); const blockEventsScraper 
= new BlockEventsScraper(); -// const currentBlockMonitor = new CurrentBlockMonitor(); -const tokensFromTransfersScraper = new TokensFromTransfersScraper(); +const currentBlockMonitor = new CurrentBlockMonitor(); const tokensFromBackfill = new TokensFromBackfill(); if (ENABLE_PROMETHEUS_METRICS) { startMetricsServer(); } +const SCRAPER_MODE = 'Blocks'; + chainIdChecker.checkChainId(CHAIN_ID); // run pull and save events @@ -86,8 +86,20 @@ createConnection(ormConfig as ConnectionOptions) if (FEAT_UNISWAP_V3_POOL_CREATED_EVENT) { await UniV3PoolSingleton.initInstance(connection); } - schedule(connection, producer, blockEventsScraper.getParseSaveAsync, 'Pull and Save Blocks and Events'); - //schedule(connection, producer, eventsBackfillScraper.getParseSaveEventsAsync, 'Backfill Events by Topic'); + if (SCRAPER_MODE === 'Blocks') { + schedule(connection, producer, blockEventsScraper.getParseSaveAsync, 'Pull and Save Blocks and Events'); + schedule(connection, producer, blockEventsScraper.backfillAsync, 'Backfill Blocks and Events'); + } else if (SCRAPER_MODE === 'Logs') { + schedule(null, null, currentBlockMonitor.monitor, 'Current Block'); + schedule(connection, producer, blockScraper.getParseSaveEventsAsync, 'Pull and Save Blocks'); + schedule( + connection, + producer, + eventsByTopicScraper.getParseSaveEventsAsync, + 'Pull and Save Events by Topic', + ); + } + schedule(connection, producer, eventsBackfillScraper.getParseSaveEventsAsync, 'Backfill Events by Topic'); //schedule( // connection, // producer, @@ -97,14 +109,6 @@ createConnection(ormConfig as ConnectionOptions) if (CHAIN_ID === 1) { // schedule(connection, null, legacyEventScraper.getParseSaveEventsAsync, 'Pull and Save Legacy Events'); } - if (FEAT_TOKENS_FROM_TRANSFERS) { - //schedule( - // connection, - // null, - // tokensFromBackfill.getParseSaveTokensFromBackfillAsync, - // 'Pull and Save Backfill Tokens', - //); - } }) .catch((error) => logger.error(error)); diff --git a/src/ormconfig.ts 
b/src/ormconfig.ts index 814f16c4..741614de 100644 --- a/src/ormconfig.ts +++ b/src/ormconfig.ts @@ -5,8 +5,6 @@ import { CancelUpToEvent, CurrentEpochInfo, ERC20BridgeTransferEvent, - UniswapV3SwapEvent, - UniswapV3PoolCreatedEvent, EpochEndedEvent, EpochFinalizedEvent, Erc1155OrderCancelledEvent, @@ -24,6 +22,8 @@ import { MetaTransactionExecutedEvent, MoveStakeEvent, NativeFill, + OnchainGovernanceCallScheduledEvent, + OnchainGovernanceProposalCreatedEvent, OperatorShareDecreasedEvent, OtcOrderFilledEvent, ParamsSetEvent, @@ -36,7 +36,6 @@ import { StakingProxyDeployment, TokenMetadata, TokenRegistry, - TokenTransferEvent, Transaction, TransactionExecutionEvent, TransactionLogs, @@ -44,14 +43,14 @@ import { TransformedERC20Event, UniswapV2PairCreatedEvent, UniswapV2SyncEvent, + UniswapV3PoolCreatedEvent, + UniswapV3SwapEvent, UnstakeEvent, + UnwrapNativeEvent, V4CancelEvent, V4LimitOrderFilledEvent, V4RfqOrderFilledEvent, - OnchainGovernanceProposalCreatedEvent, - OnchainGovernanceCallScheduledEvent, WrapNativeEvent, - UnwrapNativeEvent, } from './entities'; import { ConnectionOptions } from 'typeorm'; @@ -61,8 +60,6 @@ const entities = [ CancelUpToEvent, CurrentEpochInfo, ERC20BridgeTransferEvent, - UniswapV3SwapEvent, - UniswapV3PoolCreatedEvent, EpochEndedEvent, EpochFinalizedEvent, Erc1155OrderCancelledEvent, @@ -80,6 +77,8 @@ const entities = [ MetaTransactionExecutedEvent, MoveStakeEvent, NativeFill, + OnchainGovernanceCallScheduledEvent, + OnchainGovernanceProposalCreatedEvent, OperatorShareDecreasedEvent, OtcOrderFilledEvent, ParamsSetEvent, @@ -92,7 +91,6 @@ const entities = [ StakingProxyDeployment, TokenMetadata, TokenRegistry, - TokenTransferEvent, Transaction, TransactionExecutionEvent, TransactionLogs, @@ -100,14 +98,14 @@ const entities = [ TransformedERC20Event, UniswapV2PairCreatedEvent, UniswapV2SyncEvent, + UniswapV3PoolCreatedEvent, + UniswapV3SwapEvent, UnstakeEvent, + UnwrapNativeEvent, V4CancelEvent, V4LimitOrderFilledEvent, 
V4RfqOrderFilledEvent, - OnchainGovernanceProposalCreatedEvent, - OnchainGovernanceCallScheduledEvent, WrapNativeEvent, - UnwrapNativeEvent, ]; const config: ConnectionOptions = { diff --git a/src/parsers/events/cancel_events.ts b/src/parsers/events/cancel_events.ts index febc1c89..cc968540 100644 --- a/src/parsers/events/cancel_events.ts +++ b/src/parsers/events/cancel_events.ts @@ -1,24 +1,28 @@ +import { V3_CANCEL_ABI, V3_CANCEL_UP_TO_ABI } from '../../constants'; import { CancelEvent, CancelUpToEvent } from '../../entities'; import { convertAssetProxyIdToType } from '../../utils/proxyType'; import { parse0xAssetTokenAddress } from '../utils/asset_data_utils'; import { parseEvent } from './parse_event'; -import { ExchangeCancelEventArgs, ExchangeCancelUpToEventArgs } from '@0x/contract-wrappers'; import { assetDataUtils } from '@0x/order-utils'; import { AssetProxyId } from '@0x/types'; -import { LogWithDecodedArgs } from 'ethereum-types'; +import { LogEntry } from 'ethereum-types'; + +const abiCoder = require('web3-eth-abi'); /** * Converts a raw event log for a fill event into an ExchangeFillEvent entity. * @param eventLog Raw event log (e.g. returned from contract-wrappers). 
*/ -export function parseCancelEvent(eventLog: LogWithDecodedArgs): CancelEvent { +export function parseCancelEvent(eventLog: LogEntry): CancelEvent { const cancelEvent = new CancelEvent(); parseEvent(eventLog, cancelEvent); + const decodedLog = abiCoder.decodeLog(V3_CANCEL_ABI.inputs, eventLog.data, eventLog.topics.slice(1)); + // Asset data could be invalid, wrap it in a try-except try { - const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData); - const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData); + const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(decodedLog.makerAssetData); + const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(decodedLog.takerAssetData); // tslint:disable-next-line:no-unnecessary-type-assertion cancelEvent.makerProxyType = convertAssetProxyIdToType(makerAssetData.assetProxyId as AssetProxyId); @@ -39,12 +43,12 @@ export function parseCancelEvent(eventLog: LogWithDecodedArgs): CancelUpToEvent { +export function parseCancelUpToEvent(eventLog: LogEntry): CancelUpToEvent { const cancelUpToEvent = new CancelUpToEvent(); parseEvent(eventLog, cancelUpToEvent); - cancelUpToEvent.makerAddress = eventLog.args.makerAddress; - cancelUpToEvent.senderAddress = eventLog.args.orderSenderAddress; - cancelUpToEvent.orderEpoch = eventLog.args.orderEpoch; + const decodedLog = abiCoder.decodeLog(V3_CANCEL_UP_TO_ABI.inputs, eventLog.data, eventLog.topics.slice(1)); + + cancelUpToEvent.makerAddress = decodedLog.makerAddress; + cancelUpToEvent.senderAddress = decodedLog.orderSenderAddress; + cancelUpToEvent.orderEpoch = decodedLog.orderEpoch; return cancelUpToEvent; } diff --git a/src/parsers/events/token_transfer_events.ts b/src/parsers/events/token_transfer_events.ts deleted file mode 100644 index 78f02b56..00000000 --- a/src/parsers/events/token_transfer_events.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { TokenTransferEvent } from '../../entities'; -import { parseEvent } 
from './parse_event'; -import { LogEntry } from 'ethereum-types'; - -export function parseTokenTransfer(eventLog: LogEntry): TokenTransferEvent { - const tokenTransferEvent = new TokenTransferEvent(); - - parseEvent(eventLog, tokenTransferEvent); - - return tokenTransferEvent; -} diff --git a/src/parsers/index.ts b/src/parsers/index.ts index 74eee577..ada67b49 100644 --- a/src/parsers/index.ts +++ b/src/parsers/index.ts @@ -1,5 +1,6 @@ export { parseBridgeFill } from './events/bridge_transfer_events'; export { parseExpiredRfqOrderEvent } from './events/expired_rfq_order_events'; +export { parseCancelEvent, parseCancelUpToEvent } from './events/cancel_events'; export { parseFillEvent } from './events/fill_events'; export { parseNativeFillFromFillEvent } from './events/fill_events'; export { parseLiquidityProviderSwapEvent } from './events/liquidity_provider_swap_events'; @@ -45,4 +46,3 @@ export { parseWrapNativeTransferEvent, parseUnwrapNativeTransferEvent, } from './events/wrap_unwrap_native_events'; -export { parseTokenTransfer } from './events/token_transfer_events'; diff --git a/src/scripts/backfill_events.ts b/src/scripts/backfill_events.ts index 50ed63ba..eb3a763e 100644 --- a/src/scripts/backfill_events.ts +++ b/src/scripts/backfill_events.ts @@ -20,19 +20,6 @@ const pullAndSaveEventsByTopic = new PullAndSaveEventsByTopic(); export class EventsBackfillScraper { public async getParseSaveEventsAsync(connection: Connection, producer: Producer): Promise { const startTime = new Date().getTime(); - logger.info(`Pulling Events by Topic Backfill`); - const currentBlock = await web3Source.getCurrentBlockAsync(); - - logger.info(`latest block: ${currentBlock.number}`); - - const promises: Promise[] = []; - - const commonParams: CommonEventParams = { - connection, - producer, - web3Source, - }; - const oldestBlocksForEvents = await connection .getRepository(EventBackfill) .createQueryBuilder('event') @@ -41,69 +28,83 @@ export class EventsBackfillScraper { 
.groupBy('event.name') .getRawMany(); - const backfillEventsOldestBlock = new Map(); + if (oldestBlocksForEvents.length > 0) { + logger.info(`Pulling Events by Topic Backfill`); - oldestBlocksForEvents.forEach((event) => { - backfillEventsOldestBlock.set(event.name, parseInt(event.oldestBlockNumber)); - }); + const backfillEventsOldestBlock = new Map(); - eventScrperProps.forEach((props: EventScraperProps) => { - if (backfillEventsOldestBlock.has(props.name)) { - promises.push( - pullAndSaveEventsByTopic - .getParseSaveEventsByTopicBackfill( - commonParams.connection, - commonParams.producer, - commonParams.web3Source, - currentBlock, - props.name, - props.tType, - props.table, - props.topics, - props.contractAddress, - props.startBlock, - props.parser, - backfillEventsOldestBlock.get(props.name)!, - props.deleteOptions, - props.tokenMetadataMap, - props.callback, - props.filterFunctionGetContext, - ) - .then(async ({ transactionHashes, startBlockNumber, endBlockNumber }) => { - if (startBlockNumber !== null && endBlockNumber !== null) { - await connection - .getRepository(EventBackfill) - .createQueryBuilder('event') - .delete() - .from(EventBackfill) - .where('blockNumber >= :startBlockNumber', { startBlockNumber }) - .andWhere('blockNumber <= :endBlockNumber', { endBlockNumber }) - .execute(); - } + oldestBlocksForEvents.forEach((event) => { + backfillEventsOldestBlock.set(event.name, parseInt(event.oldestBlockNumber)); + }); - return transactionHashes; - }), - ); - } - }); + const currentBlock = await web3Source.getCurrentBlockAsync(); + + const promises: Promise[] = []; - const txHashes = [ - ...new Set( - (await Promise.all(promises)).reduce( - (accumulator: string[], value: string[]) => accumulator.concat(value), - [], + const commonParams: CommonEventParams = { + connection, + producer, + web3Source, + }; + + eventScrperProps.forEach((props: EventScraperProps) => { + if (backfillEventsOldestBlock.has(props.name)) { + promises.push( + 
pullAndSaveEventsByTopic + .getParseSaveEventsByTopicBackfill( + commonParams.connection, + commonParams.producer, + commonParams.web3Source, + currentBlock, + props.name, + props.tType, + props.table, + props.topics, + props.contractAddress, + props.startBlock, + props.parser, + backfillEventsOldestBlock.get(props.name)!, + props.deleteOptions, + props.tokenMetadataMap, + props.postProcess, + props.filterFunctionGetContext, + ) + .then(async ({ transactionHashes, startBlockNumber, endBlockNumber }) => { + if (startBlockNumber !== null && endBlockNumber !== null) { + await connection + .getRepository(EventBackfill) + .createQueryBuilder('event') + .delete() + .from(EventBackfill) + .where('blockNumber >= :startBlockNumber', { startBlockNumber }) + .andWhere('blockNumber <= :endBlockNumber', { endBlockNumber }) + .execute(); + } + + return transactionHashes; + }), + ); + } + }); + + const txHashes = [ + ...new Set( + (await Promise.all(promises)).reduce( + (accumulator: string[], value: string[]) => accumulator.concat(value), + [], + ), ), - ), - ] as string[]; + ] as string[]; - if (txHashes.length) { - await getParseSaveTxAsync(connection, producer, web3Source, txHashes); - } + if (txHashes.length) { + await getParseSaveTxAsync(connection, producer, web3Source, txHashes); + } - const endTime = new Date().getTime(); - const scriptDurationSeconds = (endTime - startTime) / 1000; - SCRIPT_RUN_DURATION.set({ script: 'events-by-topic-backfill' }, scriptDurationSeconds); + const endTime = new Date().getTime(); + const scriptDurationSeconds = (endTime - startTime) / 1000; + SCRIPT_RUN_DURATION.set({ script: 'events-by-topic-backfill' }, scriptDurationSeconds); - logger.info(`Finished backfilling events by topic in ${scriptDurationSeconds}`); + logger.info(`Finished backfilling events by topic in ${scriptDurationSeconds}`); + } } } diff --git a/src/scripts/monitor_current_block.ts b/src/scripts/monitor_current_block.ts index cdc484ea..0cfe3bc7 100644 --- 
a/src/scripts/monitor_current_block.ts +++ b/src/scripts/monitor_current_block.ts @@ -1,19 +1,13 @@ import { CHAIN_NAME, EVM_RPC_URL } from '../config'; import { logger } from '../utils/logger'; +import { CURRENT_BLOCK } from '../utils/metrics'; import { web3Factory } from '@0x/dev-utils'; import { Web3Wrapper } from '@0x/web3-wrapper'; -import { Gauge } from 'prom-client'; const provider = web3Factory.getRpcProvider({ rpcUrl: EVM_RPC_URL, }); -export const CURRENT_BLOCK = new Gauge({ - name: 'event_scraper_current_block', - help: 'The current head of the chain', - labelNames: ['chain'], -}); - export class CurrentBlockMonitor { public async monitor(): Promise { const web3Wrapper = new Web3Wrapper(provider); diff --git a/src/scripts/pull_and_save_block_events.ts b/src/scripts/pull_and_save_block_events.ts index 011086f2..d2a57f72 100644 --- a/src/scripts/pull_and_save_block_events.ts +++ b/src/scripts/pull_and_save_block_events.ts @@ -4,8 +4,10 @@ import { EVM_RPC_URL, MAX_BLOCKS_REORG, MAX_BLOCKS_TO_PULL, + FEAT_TOKENS_FROM_TRANSFERS, SCHEMA, } from '../config'; +import { TRANSFER_EVENT_TOPIC_0 } from '../constants'; import { Web3Source, BlockWithTransactionData1559 as EVMBlock } from '../data_sources/events/web3'; import { Transaction1559 as EVMTransaction, @@ -17,6 +19,7 @@ import { parseBlock, parseTransaction, parseTransactionReceipt } from '../parser import { chunk, logger } from '../utils'; import { CURRENT_BLOCK, SCRIPT_RUN_DURATION } from '../utils/metrics'; import { contractTopicFilter } from './utils/block_utils'; +import { getParseSaveTokensAsync } from './utils/web3_utils'; import { web3Factory } from '@0x/dev-utils'; import { LogEntry } from 'ethereum-types'; import { Producer } from 'kafkajs'; @@ -121,6 +124,28 @@ function parseTransactionEvents(transaction: FullTransaction): ParsedTransaction }; } +type range = { + start: number; + end: number; +}; + +function findRanges(nums: number[]): range[] { + const sorted = [...new Set(nums)].sort((a, b) => a - b); + const 
ranges: { start: number; end: number }[] = []; + const currentRange = { start: sorted[0], end: sorted[0] }; + for (let i = 1; i < sorted.length; i++) { + if (currentRange.end + 1 === sorted[i]) { + currentRange.end = sorted[i]; + } else { + ranges.push({ ...currentRange }); + currentRange.start = sorted[i]; + currentRange.end = sorted[i]; + } + } + ranges.push(currentRange); + return ranges; +} + async function saveFullBlocks(connection: Connection, eventTables: string[], parsedFullBlocks: ParsedFullBlock[]) { const parsedBlocks = parsedFullBlocks.map((block) => block.parsedBlock); const parsedTransactions = parsedFullBlocks.map((block) => block.parsedTransactions).flat(); @@ -141,8 +166,7 @@ async function saveFullBlocks(connection: Connection, eventTables: string[], par }, {} as { [id: string]: TypedEvents }), ).filter((typedEvents) => typedEvents.events.length > 0); - const blockRangeStart = parsedBlocks[0].blockNumber; - const blockRangeEnd = parsedBlocks[parsedBlocks.length - 1].blockNumber; + const blockRanges = findRanges(parsedBlocks.map((block) => block.blockNumber)); const queryRunner = connection.createQueryRunner(); await queryRunner.connect(); @@ -154,14 +178,16 @@ async function saveFullBlocks(connection: Connection, eventTables: string[], par const deletePromises: Promise[] = []; tablesToDelete.forEach(async (tableName) => { - deletePromises.push( - queryRunner.manager.query( - `DELETE FROM ${SCHEMA}.${tableName} + blockRanges.forEach(async (blockRange) => { + deletePromises.push( + queryRunner.manager.query( + `DELETE FROM ${SCHEMA}.${tableName} WHERE - block_number >= ${blockRangeStart} AND - block_number <= ${blockRangeEnd}`, - ), - ); + block_number >= ${blockRange.start} AND + block_number <= ${blockRange.end}`, + ), + ); + }); }); await Promise.all(deletePromises); @@ -199,7 +225,7 @@ async function saveFullBlocks(connection: Connection, eventTables: string[], par if (err instanceof QueryFailedError && err.message === 'could not serialize access due
to concurrent update') { logger.warn('Simultaneous write attempt, will retry on the next run'); } else { - logger.error(`Failed while saving full blocks ${blockRangeStart} - ${blockRangeEnd}`); + logger.error(`Failed while saving full blocks ${JSON.stringify(blockRanges)}`); logger.error(err); } await queryRunner.rollbackTransaction(); @@ -209,13 +235,16 @@ async function saveFullBlocks(connection: Connection, eventTables: string[], par } async function getParseSaveBlocksTransactionsEvents( connection: Connection, + producer: Producer | null, newBlocks: EVMBlock[], - blockRangeStart: number, - blockRangeEnd: number, ) { - logger.info(`Pulling Block Events for blocks: ${blockRangeStart} - ${blockRangeEnd}`); + const blockNumbers = newBlocks.map((newBlock) => newBlock.number!); - const newBlockReceipts = await web3Source.getBatchBlockReceiptsForRangeAsync(blockRangeStart, blockRangeEnd); + const blockRanges = findRanges(blockNumbers); + + logger.info(`Pulling Block Events for blocks: ${JSON.stringify(blockRanges)}`); + + const newBlockReceipts = await web3Source.getBatchBlockReceiptsAsync(blockNumbers); const fullBlocks: FullBlock[] = newBlocks.map((newBlock, blockIndex): FullBlock => { const transactionsWithLogs = newBlock.transactions.map( @@ -235,13 +264,64 @@ async function getParseSaveBlocksTransactionsEvents( const parsedFullBlocks = fullBlocks.map(parseBlockTransactionsEvents); - const eventTables = eventScrperProps.map((props: EventScraperProps) => props.table); + const eventTables = eventScrperProps + .filter((props) => props.enabled) + .map((props: EventScraperProps) => props.table); await saveFullBlocks(connection, eventTables, parsedFullBlocks); + + if (FEAT_TOKENS_FROM_TRANSFERS) { + const tokensFromTransfers = [ + ...new Set( + newBlockReceipts + .flat() + .map((tx) => tx.logs) + .flat() + .filter((log) => log.topics.length > 0 && log.topics[0] === TRANSFER_EVENT_TOPIC_0) + .map((log) => log.address), + ), + ]; + await 
getParseSaveTokensAsync(connection, producer, web3Source, tokensFromTransfers); + } } export class BlockEventsScraper { - public async getParseSaveAsync(connection: Connection, _producer: Producer | null): Promise { + public async backfillAsync(connection: Connection, producer: Producer | null): Promise { + const startTime = new Date().getTime(); + + const oldestBlocksToBackfill = await connection.query( + `SELECT DISTINCT block_number + FROM ${SCHEMA}.backfill_blocks + ORDER BY block_number + LIMIT ${MAX_BLOCKS_TO_PULL}`, + ); + + if (oldestBlocksToBackfill.length > 0) { + logger.info(`Backfilling blocks`); + + const blockNumbers = oldestBlocksToBackfill.map( + (backfillBlock: { block_number: number }) => backfillBlock.block_number, + ); + + const newBlocks = await web3Source.getBatchBlockInfoAsync(blockNumbers, true); + await getParseSaveBlocksTransactionsEvents(connection, producer, newBlocks); + + const queryRunner = connection.createQueryRunner(); + await queryRunner.connect(); + await queryRunner.manager.query( + `DELETE FROM ${SCHEMA}.backfill_blocks + WHERE block_number IN (${blockNumbers.join(',')})`, + ); + await queryRunner.release(); + + const endTime = new Date().getTime(); + const scriptDurationSeconds = (endTime - startTime) / 1000; + SCRIPT_RUN_DURATION.set({ script: 'events-by-block-backfill' }, scriptDurationSeconds); + } + } + public async getParseSaveAsync(connection: Connection, producer: Producer | null): Promise { + const startTime = new Date().getTime(); + // Monitor const currentBlockNumber = await web3Source.getBlockNumberAsync(); @@ -255,7 +335,11 @@ export class BlockEventsScraper { }); if (lastKnownBlock === undefined) { - // TODO: coldStart + logger.warn('First Run'); + const firstStartBlock = Math.max(...eventScrperProps.map((props) => props.startBlock)); + logger.warn(`Going to start from block: ${firstStartBlock}`); + const newBlocks = await web3Source.getBatchBlockInfoForRangeAsync(firstStartBlock, firstStartBlock, true); + 
getParseSaveBlocksTransactionsEvents(connection, producer, newBlocks); return; } @@ -299,12 +383,11 @@ export class BlockEventsScraper { throw Error(`Big reorg detected, of more than ${lookback}, manual intervention needed`); } - const startTime = new Date().getTime(); - getParseSaveBlocksTransactionsEvents(connection, newBlocks, blockRangeStart, blockRangeEnd); + getParseSaveBlocksTransactionsEvents(connection, producer, newBlocks); const endTime = new Date().getTime(); const scriptDurationSeconds = (endTime - startTime) / 1000; - SCRIPT_RUN_DURATION.set({ script: 'events-by-topic' }, scriptDurationSeconds); + SCRIPT_RUN_DURATION.set({ script: 'events-by-block' }, scriptDurationSeconds); logger.info(`Finished pulling events block by in ${scriptDurationSeconds}`); } diff --git a/src/scripts/pull_and_save_events_by_topic.ts b/src/scripts/pull_and_save_events_by_topic.ts index 95f52a5a..48a79edb 100644 --- a/src/scripts/pull_and_save_events_by_topic.ts +++ b/src/scripts/pull_and_save_events_by_topic.ts @@ -50,7 +50,7 @@ export class EventsByTopicScraper { props.parser, props.deleteOptions, props.tokenMetadataMap, - props.callback, + props.postProcess, props.filterFunctionGetContext, ), ); diff --git a/src/scripts/pull_and_save_legacy_events.ts b/src/scripts/pull_and_save_legacy_events.ts index 28f17863..83a9132e 100644 --- a/src/scripts/pull_and_save_legacy_events.ts +++ b/src/scripts/pull_and_save_legacy_events.ts @@ -72,31 +72,6 @@ export class LegacyEventScraper { const promises: Promise[] = []; - if (FEAT_CANCEL_EVENTS) { - promises.push( - pullAndSaveEvents.getParseSaveContractWrapperEventsAsync( - connection, - web3Source, - currentBlock, - 'CancelEvent', - 'cancel_events', - eventsSource.getCancelEventsAsync.bind(eventsSource), - parseCancelEvent, - ), - ); - promises.push( - pullAndSaveEvents.getParseSaveContractWrapperEventsAsync( - connection, - web3Source, - currentBlock, - 'CancelUpToEvent', - 'cancel_up_to_events', - 
eventsSource.getCancelUpToEventsAsync.bind(eventsSource), - parseCancelUpToEvent, - ), - ); - } - if (FEAT_STAKING) { promises.push( pullAndSaveEvents.getParseSaveContractWrapperEventsAsync< diff --git a/src/scripts/utils/event_abi_utils.ts b/src/scripts/utils/event_abi_utils.ts index 890b6a6d..f050637a 100644 --- a/src/scripts/utils/event_abi_utils.ts +++ b/src/scripts/utils/event_abi_utils.ts @@ -26,7 +26,7 @@ export class PullAndSaveEventsByTopic { eventType: any, tableName: string, topics: (string | null)[], - contractAddress: string, + contractAddress: string | null, startSearchBlock: number, parser: (decodedLog: LogEntry) => Event, deleteOptions?: DeleteOptions, @@ -105,7 +105,7 @@ export class PullAndSaveEventsByTopic { eventType: any, tableName: string, topics: (string | null)[], - contractAddress: string, + contractAddress: string | null, startSearchBlock: number, parser: (decodedLog: LogEntry) => Event, startBlockNumber: number, @@ -161,7 +161,7 @@ export class PullAndSaveEventsByTopic { eventType: any, tableName: string, topics: (string | null)[], - contractAddress: string, + contractAddress: string | null, startSearchBlock: number, parser: (decodedLog: LogEntry) => Event, startBlockNumber: number, diff --git a/src/scripts/utils/web3_utils.ts b/src/scripts/utils/web3_utils.ts index cbd7b71f..501f4d34 100644 --- a/src/scripts/utils/web3_utils.ts +++ b/src/scripts/utils/web3_utils.ts @@ -439,7 +439,7 @@ export function extractTokensFromLogs(logs: any, tokenMetadataMap: TokenMetadata export async function getParseSaveTokensAsync( connection: Connection, - producer: Producer, + producer: Producer | null, web3Source: Web3Source, tokens: string[], ): Promise { @@ -635,7 +635,7 @@ export async function getParseTxsAsync(web3Source: Web3Source, hashes: string[]) export async function getParseSaveTxAsync( connection: Connection, - producer: Producer, + producer: Producer | null, web3Source: Web3Source, hashes: string[], ): Promise { diff --git 
a/src/tokenMetadataSingleton.ts b/src/tokenMetadataSingleton.ts index 2c24b415..aa202614 100644 --- a/src/tokenMetadataSingleton.ts +++ b/src/tokenMetadataSingleton.ts @@ -41,7 +41,7 @@ export class TokenMetadataSingleton { async saveNewTokenMetadata( connection: Connection, - producer: Producer, + producer: Producer | null, newTokenMetadata: TokenMetadata[], ): Promise { await connection