Only start up the UniV2 Pool Singleton when needed
Ktl-XV committed Jul 19, 2023 · 1 parent a5f972c · commit 515661c
Showing 2 changed files with 32 additions and 1 deletion.
28 changes: 28 additions & 0 deletions docker-compose.yml
@@ -110,6 +110,10 @@ services:
       CHAIN_ID: '137'
       POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}'
       SCHEMA: 'events_polygon'
+      KAFKA_BROKERS: '${KAFKA_BROKERS}'
+      KAFKA_SSL: '${KAFKA_SSL}'
+      KAFKA_AUTH_USER: '${KAFKA_AUTH_USER}'
+      KAFKA_AUTH_PASSWORD: '${KAFKA_AUTH_PASSWORD}'
       ENABLE_PROMETHEUS_METRICS: "true"
       EP_DEPLOYMENT_BLOCK: 14391480
       MAX_BLOCKS_TO_SEARCH: 1000
@@ -138,6 +142,10 @@ services:
       CHAIN_ID: '43114'
       POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}'
       SCHEMA: 'events_avalanche'
+      KAFKA_BROKERS: '${KAFKA_BROKERS}'
+      KAFKA_SSL: '${KAFKA_SSL}'
+      KAFKA_AUTH_USER: '${KAFKA_AUTH_USER}'
+      KAFKA_AUTH_PASSWORD: '${KAFKA_AUTH_PASSWORD}'
       ENABLE_PROMETHEUS_METRICS: "true"
       EP_DEPLOYMENT_BLOCK: 3601700
       MAX_BLOCKS_TO_SEARCH: 5000
@@ -161,6 +169,10 @@ services:
       CHAIN_ID: '250'
       POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}'
       SCHEMA: 'events_fantom'
+      KAFKA_BROKERS: '${KAFKA_BROKERS}'
+      KAFKA_SSL: '${KAFKA_SSL}'
+      KAFKA_AUTH_USER: '${KAFKA_AUTH_USER}'
+      KAFKA_AUTH_PASSWORD: '${KAFKA_AUTH_PASSWORD}'
       EP_ADDRESS: "0xDEF189DeAEF76E379df891899eb5A00a94cBC250"
       ENABLE_PROMETHEUS_METRICS: "true"
       EP_DEPLOYMENT_BLOCK: 18855765
@@ -185,6 +197,10 @@ services:
       CHAIN_ID: '42220'
       POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}'
       SCHEMA: 'events_celo'
+      KAFKA_BROKERS: '${KAFKA_BROKERS}'
+      KAFKA_SSL: '${KAFKA_SSL}'
+      KAFKA_AUTH_USER: '${KAFKA_AUTH_USER}'
+      KAFKA_AUTH_PASSWORD: '${KAFKA_AUTH_PASSWORD}'
       ENABLE_PROMETHEUS_METRICS: "true"
       EP_DEPLOYMENT_BLOCK: 9350111
       MAX_BLOCKS_TO_SEARCH: 5000
@@ -205,6 +221,10 @@ services:
       CHAIN_ID: '10'
       POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}'
       SCHEMA: 'events_optimism'
+      KAFKA_BROKERS: '${KAFKA_BROKERS}'
+      KAFKA_SSL: '${KAFKA_SSL}'
+      KAFKA_AUTH_USER: '${KAFKA_AUTH_USER}'
+      KAFKA_AUTH_PASSWORD: '${KAFKA_AUTH_PASSWORD}'
       EP_DEPLOYMENT_BLOCK: 1691335
       MAX_BLOCKS_TO_SEARCH: 1000
       MAX_BLOCKS_TO_PULL: 1000
@@ -225,6 +245,10 @@ services:
       CHAIN_ID: '42161'
       POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}'
       SCHEMA: 'events_arbitrum'
+      KAFKA_BROKERS: '${KAFKA_BROKERS}'
+      KAFKA_SSL: '${KAFKA_SSL}'
+      KAFKA_AUTH_USER: '${KAFKA_AUTH_USER}'
+      KAFKA_AUTH_PASSWORD: '${KAFKA_AUTH_PASSWORD}'
       EP_DEPLOYMENT_BLOCK: 4050733
       MAX_BLOCKS_TO_SEARCH: 1000
       MAX_BLOCKS_TO_PULL: 1000
@@ -249,6 +273,10 @@ services:
       CHAIN_ID: '1'
       POSTGRES_URI: 'postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres/${POSTGRES_DB}'
       SCHEMA: 'events'
+      KAFKA_BROKERS: '${KAFKA_BROKERS}'
+      KAFKA_SSL: '${KAFKA_SSL}'
+      KAFKA_AUTH_USER: '${KAFKA_AUTH_USER}'
+      KAFKA_AUTH_PASSWORD: '${KAFKA_AUTH_PASSWORD}'
       LOG_LEVEL: "info"
       FEAT_EXCLUSIVE_TOKENS_FROM_TRANSACTIONS: "true"
       TOKENS_FROM_TRANSACTIONS_START_BLOCK: 9193266
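
Every chain's scraper service now receives the same four KAFKA_* variables, interpolated from one shared environment, so a single .env file stays authoritative for the whole compose stack. The producer construction itself is not part of this diff; as a minimal sketch of how these variables are typically consumed, assuming the kafkajs client library (the clientId and file layout below are illustrative, not the repo's):

// Minimal sketch (assumption): wiring the KAFKA_* variables into a kafkajs
// producer. kafkajs itself is an assumption; names are illustrative.
import { Kafka, Producer, SASLOptions } from 'kafkajs';

const brokers = (process.env.KAFKA_BROKERS || '').split(',');
const ssl = process.env.KAFKA_SSL === 'true';
const username = process.env.KAFKA_AUTH_USER;
const password = process.env.KAFKA_AUTH_PASSWORD;

// Attach SASL/PLAIN credentials only when both values are present, so local
// runs against an unauthenticated broker still work.
const sasl: SASLOptions | undefined =
    username && password ? { mechanism: 'plain', username, password } : undefined;

const kafka = new Kafka({ clientId: 'event-pipeline', brokers, ssl, sasl });

export const producer: Producer = kafka.producer();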
5 changes: 4 additions & 1 deletion src/index.ts
@@ -12,6 +12,7 @@ import {
     ENABLE_PROMETHEUS_METRICS,
     FEAT_TOKENS_FROM_TRANSFERS,
     FEAT_TX_BACKFILL,
+    FEAT_UNISWAP_V2_PAIR_CREATED_EVENT,
     KAFKA_AUTH_PASSWORD,
     KAFKA_AUTH_USER,
     KAFKA_BROKERS,
@@ -70,7 +71,9 @@ createConnection(ormConfig as ConnectionOptions)
     .then(async (connection) => {
         await producer.connect();
         await TokenMetadataSingleton.getInstance(connection, producer);
-        await UniV2PoolSingleton.initInstance(connection);
+        if (FEAT_UNISWAP_V2_PAIR_CREATED_EVENT) {
+            await UniV2PoolSingleton.initInstance(connection);
+        }
         schedule(null, null, currentBlockMonitor.monitor, 'Current Block');
         schedule(connection, producer, blockScraper.getParseSaveEventsAsync, 'Pull and Save Blocks');
         schedule(connection, producer, eventsByTopicScraper.getParseSaveEventsAsync, 'Pull and Save Events by Topic');
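
FEAT_UNISWAP_V2_PAIR_CREATED_EVENT is imported from the project's config module, whose definition is not shown in this diff. A minimal sketch, assuming the common pattern of parsing a boolean feature flag from an environment variable with a default of off:

// Hypothetical sketch of the flag's definition in the config module; the
// repo's actual parsing logic is not shown in this diff.
const DEFAULT_FEAT_UNISWAP_V2_PAIR_CREATED_EVENT = false;

export const FEAT_UNISWAP_V2_PAIR_CREATED_EVENT: boolean =
    process.env.FEAT_UNISWAP_V2_PAIR_CREATED_EVENT === undefined
        ? DEFAULT_FEAT_UNISWAP_V2_PAIR_CREATED_EVENT
        : process.env.FEAT_UNISWAP_V2_PAIR_CREATED_EVENT === 'true';

Gating the singleton behind the flag means deployments that do not scrape PairCreated events no longer pay the cost of initializing the UniV2 pool cache at startup.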
