Skip to content

Commit

Permalink
Merge pull request #1916 from SparkDEX/master
Browse files Browse the repository at this point in the history
Add SparkDEX V3 dimensions adapter on Flare
  • Loading branch information
dtmkeng authored Sep 17, 2024
2 parents 29ae90a + 3fc9c77 commit edf9fca
Show file tree
Hide file tree
Showing 3 changed files with 134 additions and 0 deletions.
60 changes: 60 additions & 0 deletions dexs/sparkdex-v3/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import { Chain } from "@defillama/sdk/build/general";
import { CHAIN } from "../../helpers/chains";
import { getGraphDimensions2 } from "../../helpers/getUniSubgraph";
import { BreakdownAdapter } from "../../adapters/types";

// Goldsky-hosted SparkDEX V3 subgraph endpoints, keyed by chain.
// Only Flare is deployed for now; add new chains here to extend the adapter.
const endpointsV3 = {
  [CHAIN.FLARE]:
    "https://api.goldsky.com/api/public/project_cly4708cqpcj601tt7gzf1jdj/subgraphs/sparkdex-v3/latest/gn",
};

// Volume/fee dimensions fetcher built on the shared Uniswap-style subgraph
// helper. Fee split: swappers pay the whole fee and all of it reaches LPs;
// the protocol keeps nothing (protocol fee is not set on all pools yet).
const v3Graphs = getGraphDimensions2({
  graphUrls: endpointsV3,
  totalVolume: {
    factory: "factories",
    field: "totalVolumeUSD",
  },
  feesPercent: {
    type: "fees",
    UserFees: 100, // swappers pay 100% of the collected fees
    SupplySideRevenue: 100, // ... and LPs receive all of it
    Revenue: 0, // protocol fee not enabled on every pool yet
    ProtocolRevenue: 0,
    HoldersRevenue: 0,
  },
});

// Earliest timestamp with V3 subgraph data, per chain (unix seconds).
// 1719878400 = 2024-07-02T00:00:00Z.
const startTimeV3: { [key: string]: number } = {
  [CHAIN.FLARE]: 1719878400,
};

// Methodology notes shown in the DefiLlama UI for every V3 chain entry.
const methodologyV3 = {
  Fees: "Each pool charge between 0.01% to 1% fee",
  UserFees: "Users pay between 0.01% to 1% fee",
  Revenue: "0 to 1/4 of the fee goes to treasury",
  HoldersRevenue: "None",
  ProtocolRevenue: "Treasury receives a share of the fees",
  SupplySideRevenue:
    "Liquidity providers get most of the fees of all trades in their pools",
};

// One chain entry per configured endpoint, each wired to its own fetcher
// and per-chain start time.
const v3 = Object.fromEntries(
  Object.keys(endpointsV3).map(
    (chain) =>
      [
        chain,
        {
          fetch: v3Graphs(chain as Chain),
          start: startTimeV3[chain],
          meta: { methodology: methodologyV3 },
        },
      ] as const
  )
);

// Dimensions adapter exported to the DefiLlama runner.
// Only a V3 breakdown exists for SparkDEX at the moment.
const adapter: BreakdownAdapter = {
  version: 2,
  breakdown: {
    v3: v3,
  },
};

export default adapter;
73 changes: 73 additions & 0 deletions fees/sparkdex-v3/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
import { gql, request } from "graphql-request";
import type { ChainEndpoints, FetchV2 } from "../../adapters/types";
import { Adapter } from "../../adapters/types";
import { CHAIN } from "../../helpers/chains";

import { getTimestampAtStartOfDayUTC } from "../../utils/date";

// Goldsky-hosted SparkDEX V3 subgraph endpoints for the fees adapter,
// keyed by chain (same subgraph as the dexs adapter).
const endpoints = {
  [CHAIN.FLARE]:
    "https://api.goldsky.com/api/public/project_cly4708cqpcj601tt7gzf1jdj/subgraphs/sparkdex-v3/latest/gn",
};

// Shape of one `feeStats` entity returned by the subgraph query below.
interface IFeeStat {
  // Running total of fees since inception. The fetcher divides by 1e18,
  // so this appears to be an 18-decimal fixed-point integer string —
  // TODO confirm against the subgraph schema.
  cumulativeFeeUsd: string;
  // Fees collected during the stat's period (same presumed scaling).
  feeUsd: string;
  // Subgraph entity id.
  id: string;
}

/**
 * Builds a FetchV2 handler that reads the daily `feeStats` entities from the
 * SparkDEX V3 subgraph for the invoked chain and reports daily/total fees.
 */
const graphs = (graphUrls: ChainEndpoints) => {
  const fetch: FetchV2 = async ({ chain, startTimestamp }) => {
    // Daily stat entities are keyed by the UTC start-of-day timestamp.
    const todaysTimestamp = getTimestampAtStartOfDayUTC(startTimestamp);

    const graphQuery = gql`
      query MyQuery {
        feeStats(where: {timestamp: ${todaysTimestamp}, period: daily}) {
          cumulativeFeeUsd
          feeUsd
          id
        }
      }
    `;

    const graphRes = await request(graphUrls[chain], graphQuery);
    // Guard against a malformed response: report zero fees instead of
    // crashing with a TypeError on `.forEach` of undefined.
    const feeStats: IFeeStat[] = graphRes?.feeStats ?? [];

    // Values are integer strings scaled by 1e18 (see division below); sum
    // with bigint to avoid precision loss, then scale down once at the end.
    let dailyFeeUSD = 0n;
    let totalFeeUSD = 0n;
    for (const fee of feeStats) {
      dailyFeeUSD += BigInt(fee.feeUsd);
      totalFeeUSD += BigInt(fee.cumulativeFeeUsd);
    }

    // Number(bigint) converts directly (same double rounding as the old
    // parseInt-of-toString round trip, without the string detour).
    const finalDailyFee = Number(dailyFeeUSD) / 1e18;
    const finalTotalFee = Number(totalFeeUSD) / 1e18;

    return {
      timestamp: todaysTimestamp,
      dailyFees: finalDailyFee.toString(),
      totalFees: finalTotalFee.toString(),
    };
  };
  return fetch;
};

// Methodology text surfaced in the DefiLlama UI.
// Fixed: dailyFees is computed by summing `feeUsd` (the per-day figure),
// not `cumulativeFeeUsd` (the running total) — the old text named the
// wrong field.
const methodology = {
  dailyFees: "Sum of feeUsd across daily feeStats for the specified chain for the given day",
};

// Fees adapter registration: Flare only, starting at unix 1719878400
// (2024-07-02T00:00:00Z, matching the dexs adapter's start).
const adapter: Adapter = {
  version: 2,
  adapter: {
    [CHAIN.FLARE]: {
      fetch: graphs(endpoints),
      start: 1719878400,
      meta: {
        methodology,
      },
    },
  },
};

export default adapter;
1 change: 1 addition & 0 deletions helpers/chains.ts
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ export enum CHAIN {
CHAINFLIP = "chainflip",
CRONOS_ZKEVM = "cronos_zkevm",
LOGX = "logx_network",
FLARE = "flare",
}

// Don't use
Expand Down

0 comments on commit edf9fca

Please sign in to comment.