Merge pull request #1 from TikeshFlint/tikesh/logx
PR: add LogX adapter to Linea Surge
TikeshFlint authored Jun 29, 2024
2 parents fe96798 + edc683f commit aabf853
Showing 5 changed files with 195 additions and 0 deletions.
16 changes: 16 additions & 0 deletions adapters/logx/Test/inputData.csv
@@ -0,0 +1,16 @@
block_number,timestamp
1225615,1702663435
1226390,1702668085
1226727,1702670107
1226739,1702670179
1227501,1702674751
1228937,1702683367
1230227,1702691107
1230358,1702691893
1230522,1702692877
1231684,1702699849
1232131,1702702531
1233255,1702709275
1233562,1702711117
1234773,1702717857
1235892,1702724597
14 changes: 14 additions & 0 deletions adapters/logx/Test/outputDataDemo.csv
@@ -0,0 +1,14 @@
block_number,timestamp,user_address,token_address,token_balance
1225615,1702663435,0x38f0403a832a28c065b8af30980ecd8a237fa561,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,33804080
1226390,1702668085,0xebca8b5c9a3bdfb9ddf6be4ca807ad20fd5274e1,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,10000000
1226727,1702670107,0xe39cfb6800bcfc2a522903d1d71a6c43a60f2ba7,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,50207906
1226739,1702670179,0xe74bc1aa5cb89c3fd6f66763fe433953bed19746,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,18000000
1227501,1702674751,0xc58a00cb288a67e13f285013201d15ebabe49679,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,6637500
1228937,1702683367,0xfeb0e86c7bee7a8299d3de2e8e86884d4b59553f,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,298383448
1230227,1702691107,0x03f139783ece1e975c162e9a765873718d8f4709,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,7314042
1230358,1702691893,0x7be19c4724f14de4a9765a749ca36dcc0e18620f,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,1983668673
1230522,1702692877,0x78fb7a449dd2521ff4e9374594287815b1bb97b4,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,27167978
1231684,1702699849,0x84cd2c9770e986ff761cec264913dd4a9e0df17b,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,503327661
1232131,1702702531,0xf76e31fcd9c00a9268c2cca1f3ab969e935c01f6,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,6049080
1233255,1702709275,0x79155bf78b240bf43849de68797acbf8eacc07b6,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,10000000
1233562,1702711117,0xa974890156a3649a23a6c0f2ebd77d6f7a7333d4,0x176211869ca2b568f2a7d4ee941e073a821ee1ff,16000000
30 changes: 30 additions & 0 deletions adapters/logx/package.json
@@ -0,0 +1,30 @@
{
  "name": "LogX",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "type": "commonjs",
  "scripts": {
    "start": "node dist/index.js",
    "compile": "tsc",
    "watch": "tsc -w",
    "clear": "rm -rf dist",
    "test": "node "
  },
  "keywords": [],
  "author": "",
  "license": "UNLICENSED",
  "dependencies": {
    "@babel/core": "^7.24.7",
    "@babel/node": "^7.24.7",
    "@babel/preset-env": "^7.24.7",
    "csv-parser": "^3.0.0",
    "fast-csv": "^5.0.1",
    "fs": "^0.0.1-security",
    "node-fetch": "^2.7.0"
  },
  "devDependencies": {
    "@types/node": "^20.11.17",
    "typescript": "^5.3.3"
  }
}
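
Note: fs is a Node.js built-in module; the fs entry in dependencies (^0.0.1-security) resolves to an npm placeholder package, so the import in src/index.ts uses the built-in regardless of this entry.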
125 changes: 125 additions & 0 deletions adapters/logx/src/index.ts
@@ -0,0 +1,125 @@
import fetch from 'node-fetch';
import * as fs from 'fs';
import { parse, writeToStream } from 'fast-csv';

type OutputDataSchemaRow = {
  block_number: number;
  timestamp: number;
  user_address: string;
  token_address: string;
  token_balance: number;
};

interface BlockData {
  blockNumber: number;
  blockTimestamp: number;
}

const LOGX_SUBGRAPH_QUERY_URL = 'https://api.goldsky.com/api/public/project_clxspa1gpqpvl01w65jr93p57/subgraphs/LlpManager-linea/1.0.2/gn';
const PAGE_SIZE = 1000;

const post = async (url: string, data: any): Promise<any> => {
  const response = await fetch(url, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Accept: 'application/json',
    },
    body: JSON.stringify(data),
  });
  return await response.json();
};

const getPoolData = async (blockNumber: number, skipPage: number, blockTimestamp?: number): Promise<OutputDataSchemaRow[]> => {
  const LOGX_POOL_QUERY = `
    query LiquidityQuery {
      addLiquidities(
        skip: ${skipPage},
        first: ${PAGE_SIZE},
        where: { block_number: ${blockNumber} },
      ) {
        id
        account
        token
        amount
        llpSupply
        timestamp_
      }
    }
  `;
  const csvRows: OutputDataSchemaRow[] = [];
  const responseJson = await post(LOGX_SUBGRAPH_QUERY_URL, { query: LOGX_POOL_QUERY });

  for (const item of responseJson.data.addLiquidities) {
    csvRows.push({
      block_number: blockNumber,
      timestamp: item.timestamp_,
      user_address: item.account,
      token_address: item.token,
      token_balance: item.amount,
    });
  }

  // Check if there are more records to fetch recursively
  if (responseJson.data.addLiquidities.length === PAGE_SIZE) {
    const nextPageRows = await getPoolData(blockNumber, skipPage + PAGE_SIZE, blockTimestamp);
    csvRows.push(...nextPageRows);
  }

  return csvRows;
};
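
// For reference, a sketch of the subgraph response shape the code above assumes
// (values illustrative, drawn from the test fixtures; subgraph scalars such as
// amount and timestamp_ are typically returned as JSON strings):
// {
//   "data": {
//     "addLiquidities": [
//       { "id": "...", "account": "0x38f0...", "token": "0x1762...",
//         "amount": "33804080", "llpSupply": "...", "timestamp_": "1702663435" }
//     ]
//   }
// }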

export const getUserTVLByBlock = async (blocks: BlockData) => {
  const { blockNumber, blockTimestamp } = blocks;
  // Retrieve data using block number and timestamp
  const csvRows = await getPoolData(blockNumber, 0, blockTimestamp);
  return csvRows;
};
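
// Example usage — a minimal sketch using the first row of Test/inputData.csv,
// run inside an async context:
//   const rows = await getUserTVLByBlock({ blockNumber: 1225615, blockTimestamp: 1702663435 });
//   console.log(`Fetched ${rows.length} liquidity rows`);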

const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
  const blocks: BlockData[] = [];

  return new Promise((resolve, reject) => {
    fs.createReadStream(filePath)
      .pipe(parse({ headers: true }))
      .on('error', error => reject(error))
      .on('data', (row: any) => {
        const blockNumber = parseInt(row.block_number, 10);
        const blockTimestamp = parseInt(row.timestamp, 10);
        if (!isNaN(blockNumber) && blockTimestamp) {
          blocks.push({ blockNumber, blockTimestamp });
        }
      })
      .on('end', () => resolve(blocks));
  });
};

const fetchAndWriteToCsv = async (filePath: string, blocks: BlockData[]) => {
  const allCsvRows: OutputDataSchemaRow[] = [];

  for (const block of blocks) {
    try {
      const result = await getUserTVLByBlock(block);
      allCsvRows.push(...result);
    } catch (error) {
      console.error(`An error occurred for block ${block.blockNumber}:`, error);
    }
  }

  const fileExists = fs.existsSync(filePath);
  const ws = fs.createWriteStream(filePath, { flags: 'a' });

  writeToStream(ws, allCsvRows, { headers: !fileExists, includeEndRowDelimiter: true })
    .on('finish', () => {
      console.log(`CSV file '${filePath}' has been written successfully.`);
    });
};

const inputFilePath = 'Test/inputData.csv';
const outputFilePath = 'Test/outputData.csv';

readBlocksFromCSV(inputFilePath).then(async (blocks) => {
  await fetchAndWriteToCsv(outputFilePath, blocks);
}).catch((err) => {
  console.error('Error reading CSV file:', err);
});
10 changes: 10 additions & 0 deletions adapters/logx/tsconfig.json
@@ -0,0 +1,10 @@
{
  "compilerOptions": {
    "esModuleInterop": true,
    "module": "CommonJS",
    "target": "ESNext",
    "strict": true,
    "moduleResolution": "node",
    "forceConsistentCasingInFileNames": true
  }
}
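
To run the adapter locally, assuming the scripts defined in package.json above: npm install, then npm run compile, then npm start (which executes node dist/index.js), reading blocks from Test/inputData.csv and appending the resulting rows to Test/outputData.csv.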
