gauravlochab committed · Commit f75291d
Parent(s): 755d55f
feat: Add daily value locked visualization to the dashboard
Files changed:
- app.py +42 -1
- app_value_locked.py +372 -0
- daily_value_locked.csv +4 -0
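In short, app.py now pulls the per-position DataFrame produced by the new module and stacks it into a daily total-value-locked bar chart per chain. A minimal sketch of that flow, using only the function and column names that appear in the diff below (everything else illustrative):

```python
import pandas as pd
import plotly.express as px

from app_value_locked import fetch_daily_value_locked

df_tvl = fetch_daily_value_locked()  # reads/refreshes daily_value_locked.csv
df_tvl["total_value_locked_usd"] = df_tvl["amount0_usd"] + df_tvl["amount1_usd"]
df_daily = df_tvl.groupby(["date", "chain_name"])["total_value_locked_usd"].sum().reset_index()
fig_tvl = px.bar(df_daily, x="date", y="total_value_locked_usd", color="chain_name", barmode="stack")
```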
app.py
CHANGED
@@ -6,7 +6,7 @@ from datetime import datetime, timedelta
 import json
 from web3 import Web3
 from app_trans_new import create_transcation_visualizations
-
+from app_value_locked import fetch_daily_value_locked
 OPTIMISM_RPC_URL = 'https://opt-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
 
 # Initialize a Web3 instance
@@ -178,6 +178,43 @@ def create_visualizations():
     transactions_data = fetch_and_aggregate_transactions()
     df_transactions, df_agents_weekly, df_agents_with_transactions_weekly = process_transactions_and_agents(transactions_data)
     # Map chain IDs to chain names
+
+    # Fetch daily value locked data
+    df_tvl = fetch_daily_value_locked()
+
+    # Calculate total value locked per chain per day
+    df_tvl["total_value_locked_usd"] = df_tvl["amount0_usd"] + df_tvl["amount1_usd"]
+    df_tvl_daily = df_tvl.groupby(["date", "chain_name"])["total_value_locked_usd"].sum().reset_index()
+    df_tvl_daily['date'] = pd.to_datetime(df_tvl_daily['date'])
+    # Plot total value locked
+    fig_tvl = px.bar(
+        df_tvl_daily,
+        x="date",
+        y="total_value_locked_usd",
+        color="chain_name",
+        title="Total Value Locked in Different Chains Daily",
+        labels={"date": "Date", "total_value_locked_usd": "Total Value Locked (USD)"},
+        barmode='stack',
+        color_discrete_map={
+            "Optimism": "blue",
+            "Base": "purple",
+            "Ethereum": "darkgreen"
+        }
+    )
+    fig_tvl.update_layout(
+        xaxis_title=None,
+        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
+        xaxis=dict(
+            tickmode='array',
+            tickvals=df_tvl_daily['date'],
+            ticktext=df_tvl_daily['date'].dt.strftime('%Y-%m-%d'),
+            tickangle=0,
+        ),
+        bargap=0.8,
+        height=700,
+    )
+    fig_tvl.update_xaxes(tickformat="%Y-%m-%d")
+
     chain_name_map = {
         10: "Optimism",
         8453: "Base",
@@ -327,6 +364,10 @@ def dashboard():
            fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily = create_visualizations()
            gr.Plot(fig_agents_with_transactions_daily)
 
+        with gr.Tab("Total Value Locked"):
+            fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily, fig_tvl = create_visualizations()
+            gr.Plot(fig_tvl)
+
     return demo
 
 # Launch the dashboard
app_value_locked.py
ADDED
@@ -0,0 +1,372 @@
+import json
+import requests
+from web3 import Web3
+from datetime import datetime, timedelta
+from typing import List, Dict, Optional
+import pandas as pd
+
+ADDRESSES = {
+    "optimism": {
+        "balancer_vault": "0xBA12222222228d8Ba445958a75a0704d566BF2C8",
+        "uniswap_position_manager": "0xC36442b4a4522E871399CD717aBDD847Ab11FE88"
+    },
+    "base": {
+        "balancer_vault": "0xBA12222222228d8Ba445958a75a0704d566BF2C8",
+        "uniswap_position_manager": "0x03a520b32C04BF3bEEf7BEb72E919cf822Ed34f1"
+    }
+}
+
+# Defining RPC URLs and initializing Web3 instances
+OPTIMISM_RPC_URL = 'https://opt-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
+BASE_RPC_URL = 'https://base-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
+
+print("Initializing Web3 instances...")
+web3_optimism = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))
+web3_base = Web3(Web3.HTTPProvider(BASE_RPC_URL))
+
+# Contract addresses for service registries
+contract_address_optimism = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'
+contract_address_base = '0x3C1fF68f5aa342D296d4DEe4Bb1cACCA912D95fE'
+
+# Load the ABI from a local JSON file
+with open('./contracts/service_registry_abi.json', 'r') as abi_file:
+    contract_abi = json.load(abi_file)
+
+# Create the contract instances
+service_registry_optimism = web3_optimism.eth.contract(address=contract_address_optimism, abi=contract_abi)
+service_registry_base = web3_base.eth.contract(address=contract_address_base, abi=contract_abi)
+print("Service registry contracts loaded.")
+
+def load_abi(filename):
+    with open(filename, 'r') as file:
+        contract_json = json.load(file)
+    return contract_json['abi']
+
+UNISWAP_ABI = load_abi('/Users/gauravlochab/repos/HF-Metrics/optimus-metrics/contracts/NonfungiblePositionManager.json')
+
+def get_logs(api_key, chain_name, from_block, to_block, contract_address, service_safe):
+    """Fetch logs for the given contract and wallet address with specified topics."""
+    base_url = {
+        'optimism': "https://api-optimistic.etherscan.io/api",
+        'base': "https://api.basescan.org/api"
+    }.get(chain_name)
+
+    if not base_url:
+        print(f"Invalid chain name: {chain_name}")
+        return []
+    print('formatted safe address for topic', f"0x000000000000000000000000{service_safe[2:].lower()}")
+    params = {
+        'module': 'logs',
+        'action': 'getLogs',
+        'address': contract_address,
+        'fromBlock': from_block,
+        'toBlock': to_block,
+        'apikey': api_key,
+        'topic2': f"0x000000000000000000000000{service_safe[2:].lower()}"  # Properly formatted topic2
+    }
+
+    response = requests.get(base_url, params=params)
+    data = response.json()
+
+    if data['status'] != '1':
+        print(f"Error: {data['message']}")
+        return []
+
+    return data['result']
+
+def get_block_range_for_date(chain_id, date_str, api_key, base_url):
+    """Get the block range for a specific date."""
+    target_date = datetime.strptime(date_str, "%Y-%m-%d")
+    start_of_day = datetime.combine(target_date, datetime.min.time())
+    end_of_day = datetime.combine(target_date, datetime.max.time())
+
+    start_timestamp = int(start_of_day.timestamp())
+    end_timestamp = int(end_of_day.timestamp())
+
+    # Get start block
+    start_response = requests.get(
+        f"{base_url}?module=block&action=getblocknobytime&timestamp={start_timestamp}&closest=before&apikey={api_key}"
+    )
+    if start_response.status_code == 200:
+        start_data = start_response.json()
+        start_block = start_data.get('result')
+    else:
+        print(f"Error fetching start block for {date_str} on chain {chain_id}")
+        return None, None
+
+    if start_block is None:
+        print(f"No start block found for chain {chain_id} on {date_str}")
+        return None, None
+    print(f"Start block for chain {chain_id} on {date_str}: {start_block}")
+
+    # Get end block
+    end_response = requests.get(
+        f"{base_url}?module=block&action=getblocknobytime&timestamp={end_timestamp}&closest=before&apikey={api_key}"
+    )
+    if end_response.status_code == 200:
+        end_data = end_response.json()
+        end_block = end_data.get('result')
+    else:
+        print(f"Error fetching end block for {date_str} on chain {chain_id}")
+        return None, None
+
+    if end_block is None:
+        print(f"No end block found for chain {chain_id} on {date_str}")
+        return None, None
+    print(f"End block for chain {chain_id} on {date_str}: {end_block}")
+
+    return start_block, end_block
+
+def date_range(start_date, end_date):
+    """Generates a range of dates from start_date to end_date inclusive."""
+    start_dt = datetime.strptime(start_date, "%Y-%m-%d")
+    end_dt = datetime.strptime(end_date, "%Y-%m-%d")
+    delta = timedelta(days=1)
+    current_dt = start_dt
+    while current_dt <= end_dt:
+        yield current_dt.strftime("%Y-%m-%d")
+        current_dt += delta
+
+def parse_transfer_log(chain_name, single_date, log):
+    # ERC-721 Transfer event signature
+    TRANSFER_EVENT_SIGNATURE = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
+
+    if log['topics'][0].lower() == TRANSFER_EVENT_SIGNATURE.lower():
+        # This is a Transfer event
+        return {
+            "chain_name": chain_name,
+            "date": single_date,
+            "event": "Transfer",
+            "topic_0": log['topics'][0],
+            "from": "0x" + log['topics'][1][-40:],  # Extract the address from the padded topic
+            "to": "0x" + log['topics'][2][-40:],  # Extract the address from the padded topic
+            "token_id": int(log['topics'][3], 16)  # Convert hex to decimal
+        }
+
+    # If no Transfer event is found
+    return None
+
+def parse_pool_balance_log(chain_name, single_date, log):
+    POOL_BALANCE_SIGNATURE = "0xe5ce249087ce04f05a957192435400fd97868dba0e6a4b4c049abf8af80dae78"
+    if log['topics'][0].lower() == POOL_BALANCE_SIGNATURE.lower():
+        # Event PoolBalanceChanged: (bytes32 poolId, address liquidityProvider, address[] tokens, int256[] deltas, uint256[] protocolFeeAmounts)
+        return {
+            "chain_name": chain_name,
+            "date": single_date,
+            "event": "PoolBalanceChanged",
+            "poolId": log['topics'][1],
+            "liquidityProvider": "0x" + log['topics'][2][-40:],  # Extract the address from the padded topic
+            "tokens": [log['data'][i:i + 64] for i in range(0, len(log['data']), 64)],
+            "deltas": [int(log['data'][i:i + 64], 16) for i in range(0, len(log['data']), 64)],
+            "protocolFeeAmounts": [int(log['data'][i:i + 64], 16) for i in range(0, len(log['data']), 64)]
+        }
+
+def fetch_service_safes(web3, registry_contract):
+    print("\nFetching service safes...")
+    total_services = registry_contract.functions.totalSupply().call()
+    print(f"Total services: {total_services}")
+    service_safes = set()
+
+    for service_id in range(1, total_services + 1):
+        service = registry_contract.functions.getService(service_id).call()
+        agent_ids = service[-1]  # Assuming the last element is the list of agent IDs
+
+        if 25 in agent_ids:
+            service_safe = service[1]
+            service_safes.add(service_safe)
+
+    print(f"Total service safes found: {len(service_safes)}")
+    return service_safes
+
+def get_uniswap_v3_position(web3, contract_address, token_id):
+    """Fetch the Uniswap V3 position details including `token0`, `token1`."""
+    position_manager_contract = web3.eth.contract(address=contract_address, abi=UNISWAP_ABI)
+    position_data = position_manager_contract.functions.positions(token_id).call()
+    return {
+        'token0': position_data[2],
+        'token1': position_data[3]
+    }
+
+def get_uniswap_increase_liquidity(api_key, chain_name, token_id):
+    """Fetch the IncreaseLiquidity event details including `amount0`, `amount1`."""
+    base_url = {
+        'optimism': "https://api-optimistic.etherscan.io/api",
+        'base': "https://api.basescan.org/api"
+    }.get(chain_name)
+
+    if not base_url:
+        print(f"Invalid chain name: {chain_name}")
+        return {}
+
+    params = {
+        'module': 'logs',
+        'action': 'getLogs',
+        'address': ADDRESSES[chain_name]['uniswap_position_manager'],
+        'topic0': "0x3067048beee31b25b2f1681f88dac838c8bba36af25bfb2b7cf7473a5847e35f",
+        'topic1': f"0x{token_id:064x}",
+        'apikey': api_key
+    }
+
+    response = requests.get(base_url, params=params)
+    data = response.json()
+
+    if data['status'] != '1':
+        print(f"Error: {data['message']}")
+        return {}
+
+    log = data['result'][0] if data['result'] else None
+    if not log:
+        return {}
+
+    # Extracting amounts from the data hex-string
+    data_hex = log['data'][2:]  # Remove '0x' prefix
+    liquidity = int(data_hex[0:64], 16)
+    amount0 = int(data_hex[64:128], 16)
+    amount1 = int(data_hex[128:192], 16)
+
+    return {
+        'liquidity': liquidity,
+        'amount0': amount0,
+        'amount1': amount1
+    }
+
+def get_token_decimals(web3, token_address):
+    """Fetch the number of decimals for a given ERC-20 token."""
+    token_contract = web3.eth.contract(address=token_address, abi=[
+        {"constant":True,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":False,"stateMutability":"view","type":"function"}
+    ])
+    return token_contract.functions.decimals().call()
+
+def get_token_price_usd(chain, token_address):
+    chain_dict = {"optimism": "optimistic-ethereum", "base": "base", "ethereum": "ethereum"}
+    chain_name = chain_dict.get(chain, chain)
+
+    url = f"https://api.coingecko.com/api/v3/simple/token_price/{chain_name}?contract_addresses={token_address}&vs_currencies=usd"
+
+    headers = {
+        "accept": "application/json",
+        "x-cg-api-key": "CG-mf5xZnGELpSXeSqmHDLY2nNU"
+    }
+
+    response = requests.get(url, headers=headers)
+    data = response.json()
+
+    # Extract USD price
+    key = token_address.lower()
+    return data.get(key, {}).get('usd', None)
+
+def load_existing_transactions(file_path):
+    """Load existing transactions from a CSV file."""
+    try:
+        return pd.read_csv(file_path)
+    except FileNotFoundError:
+        return pd.DataFrame()
+
+def get_last_processed_date(df):
+    """Get the last processed date from the DataFrame."""
+    if df.empty:
+        return None
+    return df['date'].max()
+
+# Main function to integrate the above based on provided context
+def fetch_daily_value_locked():
+    api_keys = {
+        'optimism': 'XQ72JA5XZ51QC7TG1W295AAIF4KTV92K1K',
+        'base': '4BFQMVW1QUKEPVDA4VW711CF4462682CY8'
+    }
+
+    base_urls = {
+        10: "https://api-optimistic.etherscan.io/api",
+        8453: "https://api.basescan.org/api"
+    }
+
+    # Load existing transactions if any
+    existing_transactions_path = 'daily_value_locked.csv'
+    df_existing_transactions = load_existing_transactions(existing_transactions_path)
+    last_processed_date = get_last_processed_date(df_existing_transactions)
+
+    # Determine the start date based on the last processed date
+    start_date = (pd.to_datetime(last_processed_date) + timedelta(days=1)).strftime('%Y-%m-%d') if last_processed_date else '2024-09-19'
+    current_date = datetime.now().strftime('%Y-%m-%d')  # Till present date
+
+    chains = {
+        10: ('optimism', 'uniswap_position_manager', 'balancer_vault'),
+        8453: ('base', 'uniswap_position_manager', 'balancer_vault')
+    }
+
+    # Example service safe addresses - Replace these with actual addresses
+    print("Fetching service safes for Optimism...")
+    service_safes_optimism = fetch_service_safes(web3_optimism, service_registry_optimism)
+    print(service_safes_optimism)
+    print("Fetching service safes for Base...")
+    service_safes_base = fetch_service_safes(web3_base, service_registry_base)
+    print(service_safes_base)
+
+    service_safes = {
+        'optimism': service_safes_optimism,
+        'base': service_safes_base
+    }
+
+    all_transactions = []  # List to hold all parsed logs
+
+    for chain_id, (chain_name, uniswap_contract_key, balancer_contract_key) in chains.items():
+        base_url = base_urls[chain_id]
+        api_key = api_keys[chain_name]
+
+        for service_safe in service_safes[chain_name]:
+            print(f"Checking service safe {service_safe} for chain {chain_name}")
+            for single_date in date_range(start_date, current_date):
+                start_block, end_block = get_block_range_for_date(chain_id, single_date, api_key, base_url)
+                if start_block is None or end_block is None:
+                    print(f"Skipping date {single_date} for chain {chain_name} due to missing block data.")
+                    continue
+
+                print(f"Start Block: {start_block}, End Block: {end_block} for date {single_date} on chain {chain_name} for service safe {service_safe}")
+
+                # Get logs for Uniswap and Balancer contracts
+                for contract_key, topic_key in [(uniswap_contract_key, "transfer"), (balancer_contract_key, "pool_balance_changed")]:
+                    contract_address = ADDRESSES[chain_name][contract_key]
+                    print(api_key, chain_name, start_block, end_block, contract_address, service_safe)
+                    logs = get_logs(api_key, chain_name, start_block, end_block, contract_address, service_safe)
+
+                    for log in logs:
+                        parsed_log = parse_pool_balance_log(chain_name, single_date, log) if topic_key == "pool_balance_changed" else parse_transfer_log(chain_name, single_date, log)
+                        if parsed_log:
+                            if topic_key == "transfer":
+                                # If the event is a Transfer event, fetch uniswap position details and increase liquidity event details
+                                uniswap_v3_data = get_uniswap_v3_position(web3_base if chain_name == 'base' else web3_optimism, contract_address, parsed_log['token_id'])
+                                increase_liquidity_data = get_uniswap_increase_liquidity(api_key, chain_name, parsed_log['token_id'])
+
+                                token0_address = uniswap_v3_data['token0']
+                                token1_address = uniswap_v3_data['token1']
+                                decimals_token0 = get_token_decimals(web3_base if chain_name == 'base' else web3_optimism, token0_address)
+                                decimals_token1 = get_token_decimals(web3_base if chain_name == 'base' else web3_optimism, token1_address)
+                                print(decimals_token0,decimals_token1)
+                                increase_liquidity_data['amount0'] /= 10**decimals_token0
+                                increase_liquidity_data['amount1'] /= 10**decimals_token1
+
+                                usd_price_token0 = get_token_price_usd(chain_name, token0_address)
+                                usd_price_token1 = get_token_price_usd(chain_name, token1_address)
+
+                                if usd_price_token0 is not None and usd_price_token1 is not None:
+                                    increase_liquidity_data['amount0_usd'] = increase_liquidity_data['amount0'] * usd_price_token0
+                                    increase_liquidity_data['amount1_usd'] = increase_liquidity_data['amount1'] * usd_price_token1
+
+                                parsed_log.update(uniswap_v3_data)
+                                parsed_log.update(increase_liquidity_data)
+                            all_transactions.append(parsed_log)
+
+    # Convert to DataFrame and append to existing transactions
+    df_new_transactions = pd.DataFrame(all_transactions)
+
+    if not df_existing_transactions.empty:
+        all_data = pd.concat([df_existing_transactions, df_new_transactions])
+    else:
+        all_data = df_new_transactions
+
+    all_data.to_csv(existing_transactions_path, index=False)
+    print("Data saved to", existing_transactions_path)
+
+    return all_data
+
+#print(df_transactions)
daily_value_locked.csv
ADDED
@@ -0,0 +1,4 @@
+chain_name,date,event,topic_0,from,to,token_id,token0,token1,liquidity,amount0,amount1,amount0_usd,amount1_usd
+optimism,2024-10-07,Transfer,0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef,0x0000000000000000000000000000000000000000,0x5f0c4273ff97ae91fc8d2fc8621b5e37a741d1b1,826974,0x2218a117083f5B482B0bB821d27056Ba9c04b1D3,0xdFA46478F9e5EA86d57387849598dbFB2e964b02,13516644515555081265,8.98929230571145,20.324144854406946,9.97811445933971,10.184307041674192
+base,2024-09-19,Transfer,0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef,0x0000000000000000000000000000000000000000,0xa13dfc6ddcff0b5b637e721ee83d6cf7e0676e73,963863,0x01CCF4941298a0b5AC4714c0E1799a2dF8387048,0x4200000000000000000000000000000000000006,1210264887099988865,359.58545796548844,0.0040734158306472,9.341785679831974,10.270669941235578
+base,2024-09-26,Transfer,0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef,0x0000000000000000000000000000000000000000,0xac55eeb3fdacfdfae78c62caa58934900ad54ed2,978001,0x01CCF4941298a0b5AC4714c0E1799a2dF8387048,0x4200000000000000000000000000000000000006,1524286215726092055,469.7981777244338,0.004945631076533,12.205037194519935,12.469864740059744