Skip to content

Commit

Permalink
metric version 2
Browse files Browse the repository at this point in the history
  • Loading branch information
naik-ai committed Sep 23, 2024
1 parent 4af5b28 commit 6ba4cb0
Show file tree
Hide file tree
Showing 10 changed files with 710 additions and 216 deletions.
1 change: 1 addition & 0 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ sqlalchemy = "*"
dbt = "*"
dbt-core = "*"
dbt-bigquery = "*"
bs4 = "*"

[dev-packages]
black="*"
Expand Down
421 changes: 229 additions & 192 deletions Pipfile.lock

Large diffs are not rendered by default.

Empty file.
86 changes: 86 additions & 0 deletions src/streamlit_everclear/chains_assets_metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import requests
import pandas as pd
from bs4 import BeautifulSoup


class ChainsAssetsMetadata:
    """Scrapes chain/asset metadata from an Everclear docs page.

    Fetches the HTML page at ``url``, locates the 'Registered Assets'
    section, and converts the table that follows it into a pandas
    DataFrame.
    """

    def __init__(self, url: str, timeout: float = 30.0):
        """
        Args:
            url (str): The docs page URL to scrape.
            timeout (float): Seconds to wait for the HTTP response.
                Prevents the request from hanging indefinitely.
        """
        self.url = url
        self.timeout = timeout

    def pull_registered_assets_data(self) -> pd.DataFrame:
        """
        Fetches and parses the registered assets table from the specified URL.

        Returns:
            pd.DataFrame: A DataFrame containing the registered assets metadata.

        Raises:
            requests.HTTPError: If the page request returns an error status.
            ValueError: If the 'Registered Assets' section or its table cannot
                be found, or a table row does not have the expected number of
                columns.
        """
        # Step 1: Fetch the webpage content. A timeout bounds the request
        # so an unresponsive server cannot hang the caller forever.
        response = requests.get(self.url, timeout=self.timeout)
        response.raise_for_status()  # Raises HTTPError for bad responses
        content = response.content

        # Step 2: Parse the HTML content using BeautifulSoup
        soup = BeautifulSoup(content, "html.parser")

        # Step 3: Locate the 'Registered Assets' section and find the table
        registered_assets_header = soup.find("h2", id="registered-assets")
        if not registered_assets_header:
            raise ValueError(
                "Couldn't find the 'Registered Assets' section in the HTML."
            )

        # Assuming the table is immediately after the header
        table = registered_assets_header.find_next("table")
        if not table:
            raise ValueError(
                "Couldn't find the table under the 'Registered Assets' section."
            )

        # Expected table layout; kept in one place so the row-width check
        # and the DataFrame construction cannot drift apart.
        columns = [
            "asset_name",
            "symbol",
            "decimals",
            "domain_id",
            "address",
            "faucet",
            "faucet_limit",
        ]

        # Step 4: Loop through the rows of the table to gather the data.
        # Header rows use <th> cells, so they yield no <td> and are skipped.
        table_data = []
        for row in table.find_all("tr"):
            cols = row.find_all("td")
            if cols:  # Ensure the row has data cells
                cols_text = [col.get_text(strip=True) for col in cols]
                if len(cols_text) != len(columns):
                    # Fail loudly with context instead of letting pandas
                    # raise a cryptic shape-mismatch error later.
                    raise ValueError(
                        f"Expected {len(columns)} columns per table row, "
                        f"got {len(cols_text)}: {cols_text}"
                    )
                table_data.append(cols_text)

        # Step 5: Convert the data to a pandas DataFrame
        return pd.DataFrame(table_data, columns=columns)

    def save_to_csv(self, df: pd.DataFrame, filepath: str):
        """
        Saves the DataFrame to a specified CSV file.

        Args:
            df (pd.DataFrame): The DataFrame to save.
            filepath (str): The path where the CSV will be saved.
        """
        df.to_csv(filepath, index=False)
        print(f"Data has been saved to '{filepath}'.")


if __name__ == "__main__":
    from pathlib import Path

    # Initialize the class with the target URL
    metadata_scraper = ChainsAssetsMetadata(
        url="https://docs.everclear.org/resources/contracts/mainnet"
    )

    # Pull the registered assets data into a DataFrame
    df_assets = metadata_scraper.pull_registered_assets_data()

    # Ensure the output directory exists so to_csv does not fail with
    # FileNotFoundError on a fresh checkout.
    output_path = Path("data") / "chains_assets_metadata.csv"
    output_path.parent.mkdir(parents=True, exist_ok=True)

    # Save the DataFrame to a CSV file
    metadata_scraper.save_to_csv(df_assets, str(output_path))
6 changes: 5 additions & 1 deletion src/streamlit_everclear/sql/Metric_12_Settlement_Time.sql
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,8 @@ SELECT

FROM public.intents i
WHERE i.settlement_status = 'SETTLED'
GROUP BY 1
GROUP BY 1



-- filter MM by origin initiators
26 changes: 21 additions & 5 deletions src/streamlit_everclear/sql/Metric_8_Netting_Rate.sql
Original file line number Diff line number Diff line change
Expand Up @@ -7,20 +7,24 @@
-- Settled event is emitted
-- when hub intent is settled that message is sent to destination, settled on destination then received the money



-- netted as matched by other intents: s = o + p.f
-- netted -> that is not an invoice ->


WITH raw AS (
SELECT
DATE_TRUNC('day', to_timestamp(i.origin_timestamp)) AS day,
COUNT(i.id) AS total_intents,
COUNT(CASE
WHEN (i.settlement_timestamp - i.origin_timestamp <= 3600)
AND i.settlement_status = 'SETTLED'
AND CAST(i.origin_ttl AS INTEGER) = 0
AND i.settlement_status = 'SETTLED'
THEN i.id
END) AS count_of_intents_within_1h,
COUNT(CASE
WHEN (i.settlement_timestamp - i.origin_timestamp <= 86400)
AND i.settlement_status = 'SETTLED'
AND CAST(i.origin_ttl AS INTEGER) = 0
AND i.settlement_status = 'SETTLED'
THEN i.id
END) AS count_of_intents_within_24h
FROM public.intents i
Expand All @@ -32,4 +36,16 @@ SELECT
ROUND(count_of_intents_within_1h * 100.0 / total_intents, 2) AS netting_rate_1h_percentage,
-- # netting rate 24h
ROUND(count_of_intents_within_24h * 100.0 / total_intents, 2) AS netting_rate_24h_percentage
FROM raw
FROM raw



-- netted intents:

SELECT
*
FROM public.intents i
FULL OUTER JOIN public.invoices inv
ON i.id = inv.id
WHERE inv.id IS NULL
-- status filter
Original file line number Diff line number Diff line change
Expand Up @@ -48,4 +48,11 @@ SELECT
SUM(fee_amount + discount) AS rebalancing_fee
FROM raw
GROUP BY 1
ORDER BY 1 DESC;
ORDER BY 1 DESC;


-- missing rewards in the above query:
-- amounts from the hub invoice amount is the rewards
-- rewards = origin_amount - hub_invoiced_amount -> accurate for intents that become invoices
-- discounts = hub_invoiced_amount - settlement_amount
-- rebalancing_fee = protocol_fee + discounts
20 changes: 3 additions & 17 deletions src/streamlit_everclear/sql/metric_3_Epoch_Discount.sql
Original file line number Diff line number Diff line change
Expand Up @@ -16,21 +16,7 @@ SELECT
DATE_TRUNC('day', to_timestamp(i.origin_timestamp)) as day,
-- each epoch is 30 mins so count avg epoch based on the time
ROUND(AVG(i.hub_settlement_epoch - i.hub_invoice_entry_epoch), 0) as discount_epoch
FROM public.invoices i
-- filter out the netted invoices
WHERE CAST(i.origin_ttl AS INTEGER) > 0 AND i.hub_status = 'DISPATCHED'
FROM public.invoices i
WHERE i.hub_status IN ('DISPATCHED', 'SETTLED')
GROUP BY 1




-- SELECT
-- DATE_TRUNC('day', to_timestamp(i.origin_timestamp)) as day,
-- -- AVG(i.origin_amount::float - i.settlement_amount::float) as discount_value,
-- -- each epoch is 30 mins so count avg epoch based on the time
-- ROUND(AVG(EXTRACT(EPOCH FROM (to_timestamp(hi.settlement_enqueued_timestamp) - to_timestamp(i.origin_timestamp))) / 1800), 0) as discount_epoch
-- FROM public.intents i
-- LEFT JOIN public.hub_intents hi ON i.id = hi.id
-- -- filter out the netted invoices
-- WHERE CAST(i.origin_ttl AS INTEGER) > 0 AND i.settlement_status = 'SETTLED'
-- GROUP BY 1
ORDER BY 1 DESC;
Loading

0 comments on commit 6ba4cb0

Please sign in to comment.