gauravlochab committed on
Commit c1b9444 · 1 Parent(s): 1520dc2

add all transactions across 3 chains

Files changed (1)
  1. app.py +207 -363
app.py CHANGED
@@ -6,380 +6,242 @@ from datetime import datetime, timedelta
6
  import json
7
  from web3 import Web3
8
 
 
9
  OPTIMISM_RPC_URL = 'https://opt-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
 
 
10
 
11
- # Initialize a Web3 instance
12
- web3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))
13
 
14
- # Check if connection is successful
15
- if not web3.is_connected():
16
- raise Exception("Failed to connect to the Optimism network.")
17
-
18
- # Contract address
19
- contract_address = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'
20
 
21
- # Load the ABI from the provided JSON file
22
  with open('service_registry_abi.json', 'r') as abi_file:
23
  contract_abi = json.load(abi_file)
24
 
25
- # Now you can create the contract
26
- service_registry = web3.eth.contract(address=contract_address, abi=contract_abi)
27
-
28
- def get_transfers(integrator: str, wallet: str) -> str:
29
- url = f"https://li.quest/v1/analytics/transfers?integrator={integrator}&wallet={wallet}"
30
- headers = {"accept": "application/json"}
31
- response = requests.get(url, headers=headers)
32
- return response.json()
33
-
34
- def get_vanity_transactions(date):
35
- # Placeholder function to return 4 vanity transactions for the given date
36
- return pd.DataFrame({
37
- 'date': [date] * 4,
38
- 'sending_chain': ['Optimism'] * 4,
39
- 'transaction_count': [1] * 4 # Each vanity transaction counts as 1
40
- })
41
-
42
- def load_activity_checker_contract(w3, staking_token_address):
43
- """
44
- Loads the Staking Token and Activity Checker contracts.
45
-
46
- :param w3: Web3 instance
47
- :param staking_token_address: Address of the staking token contract
48
- :return: Tuple of (Staking Token contract instance, Activity Checker contract instance)
49
- """
50
- try:
51
- # Load the ABI file for the Staking Token contract
52
- with open('./contracts/StakingToken.json', "r", encoding="utf-8") as file:
53
- staking_token_data = json.load(file)
54
-
55
- staking_token_abi = staking_token_data.get("abi", [])
56
-
57
- # Create the Staking Token contract instance
58
- staking_token_contract = w3.eth.contract(address=staking_token_address, abi=staking_token_abi)
59
-
60
- # Get the activity checker contract address from staking_token_contract
61
- activity_checker_address = staking_token_contract.functions.activityChecker().call()
62
-
63
- # Load the ABI file for the Activity Checker contract
64
- with open('./contracts/StakingActivityChecker.json', "r", encoding="utf-8") as file:
65
- activity_checker_data = json.load(file)
66
-
67
- activity_checker_abi = activity_checker_data.get("abi", [])
68
-
69
- # Create the Activity Checker contract instance
70
- activity_checker_contract = w3.eth.contract(address=activity_checker_address, abi=activity_checker_abi)
71
-
72
- return staking_token_contract, activity_checker_contract
73
-
74
- except Exception as e:
75
- print(f"An error occurred while loading the contracts: {e}")
76
- raise
77
78
 
79
- def fetch_and_aggregate_transactions():
80
- total_services = service_registry.functions.totalSupply().call()
81
- aggregated_transactions = []
82
- daily_agent_counts = {}
83
- daily_agents_with_transactions = {}
84
 
85
- _staking_token_contract, activity_checker_contract = load_activity_checker_contract(web3, '0x88996bbdE7f982D93214881756840cE2c77C4992')
 
86
 
87
  for service_id in range(1, total_services + 1):
88
- service = service_registry.functions.getService(service_id).call()
89
-
90
- # Extract the list of agent IDs from the service data
91
  agent_ids = service[-1] # Assuming the last element is the list of agent IDs
 
92
 
93
- # Check if 25 is in the list of agent IDs
94
  if 25 in agent_ids:
95
- agent_address = service_registry.functions.getAgentInstances(service_id).call()[1][0]
96
- response_transfers = get_transfers("valory", agent_address)
97
- transfers = response_transfers.get("transfers", [])
98
- if isinstance(transfers, list):
99
- aggregated_transactions.extend(transfers)
100
-
101
- # Track the daily number of agents
102
- creation_event = service_registry.events.CreateService.create_filter(
103
- from_block=0, argument_filters={'serviceId': service_id, 'configHash': service[2]}
104
- ).get_all_entries()
105
-
106
- if creation_event:
107
- block_number = creation_event[0]['blockNumber']
108
- block = web3.eth.get_block(block_number)
109
- creation_timestamp = datetime.fromtimestamp(block['timestamp'])
110
- date_str = creation_timestamp.strftime('%Y-%m-%d')
111
-
112
- if date_str not in daily_agent_counts:
113
- daily_agent_counts[date_str] = set()
114
- if date_str not in daily_agents_with_transactions:
115
- daily_agents_with_transactions[date_str] = set()
116
-
117
- for agent_id in agent_ids:
118
- service_safe = service[1]
119
- print("agent_address",agent_address,"service_safe",service_safe)
120
- multisig_nonces = activity_checker_contract.functions.getMultisigNonces(service_safe).call()[0]
121
- if multisig_nonces > 0:
122
- daily_agents_with_transactions[date_str].add(agent_id)
123
- daily_agent_counts[date_str].add(agent_id)
124
-
125
- # Convert set to count
126
- daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
127
- daily_agents_with_transactions = {date: len(agents) for date, agents in daily_agents_with_transactions.items()}
128
-
129
- return aggregated_transactions, daily_agent_counts, daily_agents_with_transactions
130
-
131
- # Function to parse the transaction data and prepare it for visualization
132
- def process_transactions_and_agents(data):
133
- transactions, daily_agent_counts, daily_agents_with_transactions = data
134
-
135
- # Convert the data into a pandas DataFrame for easy manipulation
136
- rows = []
137
- for tx in transactions:
138
- # Normalize amounts
139
- sending_amount = float(tx["sending"]["amount"]) / (10 ** tx["sending"]["token"]["decimals"])
140
- receiving_amount = float(tx["receiving"]["amount"]) / (10 ** tx["receiving"]["token"]["decimals"])
141
-
142
- # Convert timestamps to datetime objects
143
- sending_timestamp = datetime.utcfromtimestamp(tx["sending"]["timestamp"])
144
- receiving_timestamp = datetime.utcfromtimestamp(tx["receiving"]["timestamp"])
145
-
146
- # Prepare row data
147
- rows.append({
148
- "transactionId": tx["transactionId"],
149
- "from_address": tx["fromAddress"],
150
- "to_address": tx["toAddress"],
151
- "sending_chain": tx["sending"]["chainId"],
152
- "receiving_chain": tx["receiving"]["chainId"],
153
- "sending_token_symbol": tx["sending"]["token"]["symbol"],
154
- "receiving_token_symbol": tx["receiving"]["token"]["symbol"],
155
- "sending_amount": sending_amount,
156
- "receiving_amount": receiving_amount,
157
- "sending_amount_usd": float(tx["sending"]["amountUSD"]),
158
- "receiving_amount_usd": float(tx["receiving"]["amountUSD"]),
159
- "sending_gas_used": int(tx["sending"]["gasUsed"]),
160
- "receiving_gas_used": int(tx["receiving"]["gasUsed"]),
161
- "sending_timestamp": sending_timestamp,
162
- "receiving_timestamp": receiving_timestamp,
163
- "date": sending_timestamp.date(), # Group by day
164
- "week": sending_timestamp.strftime('%Y-%m-%d') # Group by week
165
- })
166
-
167
- df_transactions = pd.DataFrame(rows)
168
- df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
169
- df_agents_with_transactions = pd.DataFrame(list(daily_agents_with_transactions.items()), columns=['date', 'agent_count_with_transactions'])
170
 
171
- # Convert the date column to datetime
172
- df_agents['date'] = pd.to_datetime(df_agents['date'])
173
- df_agents_with_transactions['date'] = pd.to_datetime(df_agents_with_transactions['date'])
174
-
175
- # Convert to week periods
176
- df_agents['week'] = df_agents['date'].dt.to_period('W').apply(lambda r: r.start_time)
177
- df_agents_with_transactions['week'] = df_agents_with_transactions['date'].dt.to_period('W').apply(lambda r: r.start_time)
178
 
179
- # Group by week
180
- df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()
181
- df_agents_with_transactions_weekly = df_agents_with_transactions[['week', 'agent_count_with_transactions']].groupby('week').sum().reset_index()
182
-
183
- return df_transactions, df_agents_weekly, df_agents_with_transactions_weekly
184
-
185
- # Function to create visualizations based on the metrics
186
- def create_visualizations():
187
- transactions_data = fetch_and_aggregate_transactions()
188
- df_transactions, df_agents_weekly, df_agents_with_transactions_weekly = process_transactions_and_agents(transactions_data)
189
-
190
- # Map chain IDs to chain names
191
- chain_name_map = {
192
- 10: "Optimism",
193
- 8453: "Base",
194
- 1: "Ethereum"
 
 
195
  }
196
- df_transactions["sending_chain"] = df_transactions["sending_chain"].map(chain_name_map)
197
- df_transactions["receiving_chain"] = df_transactions["receiving_chain"].map(chain_name_map)
198
-
199
- # Ensure that chain IDs are strings for consistent grouping
200
- df_transactions["sending_chain"] = df_transactions["sending_chain"].astype(str)
201
- df_transactions["receiving_chain"] = df_transactions["receiving_chain"].astype(str)
202
- df_transactions['date'] = pd.to_datetime(df_transactions['date'])
203
 
204
- # Total transactions per chain per day
205
- tx_per_chain = df_transactions.groupby(["date", "sending_chain"]).size().reset_index(name="transaction_count")
206
 
207
- # Add vanity transactions for each day
208
- date_range = pd.date_range(start=df_transactions['date'].min(), end=df_transactions['date'].max())
209
- vanity_transactions = pd.concat([get_vanity_transactions(date) for date in date_range])
210
 
211
- # Combine actual and vanity transactions
212
- tx_per_chain = pd.concat([tx_per_chain, vanity_transactions]).groupby(["date", "sending_chain"]).sum().reset_index()
213
-
214
- # Keep Optimism transactions always in the bottom
215
- tx_per_chain["Order"] = tx_per_chain["sending_chain"].apply(lambda x: 0 if x == "Optimism" else 1)
216
- tx_per_chain = tx_per_chain.sort_values(by=["Order", "date"])
217
-
218
- # Define the color mapping for chains
219
- color_map = {
220
- "Optimism": "blue",
221
- "Ethereum": "darkgreen",
222
- "Base": "purple"
223
  }
224
 
225
  fig_tx_chain = px.bar(
226
- tx_per_chain,
227
- x="date",
228
- y="transaction_count",
229
- color="sending_chain",
230
- title="Chain Daily Activity: Transactions",
231
- labels={"sending_chain": "Transaction Chain", "transaction_count": "Daily Transaction Nr"},
232
  barmode="stack",
233
- color_discrete_map=color_map
234
  )
235
  fig_tx_chain.update_layout(
236
- xaxis_title=None,
237
- yaxis=dict(tickmode='linear', tick0=0, dtick=1),
238
- xaxis=dict(
239
- tickmode='array',
240
- tickvals=tx_per_chain['date'],
241
- ticktext=tx_per_chain['date'].dt.strftime('%y-%m-%d'),
242
- tickangle=0,
243
- ),
244
- bargap=0.8,
245
- height=700,
246
- )
247
- fig_tx_chain.update_xaxes(tickformat="%Y-%m-%d")
248
-
249
- # Identify swap transactions
250
- df_transactions["is_swap"] = df_transactions.apply(lambda x: x["sending_token_symbol"] != x["receiving_token_symbol"], axis=1)
251
-
252
- # Total swaps per chain per day
253
- swaps_per_chain = df_transactions[df_transactions["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
254
- fig_swaps_chain = px.bar(
255
- swaps_per_chain,
256
- x="date",
257
- y="swap_count",
258
- color="sending_chain",
259
- title="Chain Daily Activity: Swaps",
260
- labels={"sending_chain": "Transaction Chain", "swap_count": "Daily Swap Nr"},
261
- barmode="stack",
262
- color_discrete_map=color_map
263
- )
264
- fig_swaps_chain.update_layout(
265
- xaxis_title=None,
266
- yaxis=dict(tickmode='linear', tick0=0, dtick=1),
267
- xaxis=dict(
268
- tickmode='array',
269
- tickvals=swaps_per_chain['date'],
270
- ticktext=swaps_per_chain['date'].dt.strftime('%y-%m-%d'),
271
- tickangle=0,
272
- ),
273
- bargap=0.8,
274
- height=700,
275
- )
276
- fig_swaps_chain.update_xaxes(tickformat="%Y-%m-%d")
277
-
278
- # Identify bridge transactions
279
- df_transactions["is_bridge"] = df_transactions.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)
280
-
281
- # Total bridges per chain per day
282
- bridges_per_chain = df_transactions[df_transactions["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
283
- fig_bridges_chain = px.bar(
284
- bridges_per_chain,
285
- x="date",
286
- y="bridge_count",
287
- color="sending_chain",
288
- title="Chain Daily Activity: Bridges",
289
- labels={"sending_chain": "Transaction Chain", "bridge_count": "Daily Bridge Nr"},
290
- barmode="stack",
291
- color_discrete_map=color_map
292
- )
293
- fig_bridges_chain.update_layout(
294
- xaxis_title=None,
295
- yaxis=dict(tickmode='linear', tick0=0, dtick=1),
296
- xaxis=dict(
297
- tickmode='array',
298
- tickvals=bridges_per_chain['date'],
299
- ticktext=bridges_per_chain['date'].dt.strftime('%y-%m-%d'),
300
- tickangle=0,
301
- ),
302
- bargap=0.8,
303
- height=700,
304
- )
305
- fig_bridges_chain.update_xaxes(tickformat="%Y-%m-%d")
306
-
307
- # Investment per agent per day
308
- investment_per_agent = df_transactions.groupby(["date", "from_address", "sending_chain"])["sending_amount_usd"].sum().reset_index()
309
- fig_investment_agent = px.bar(
310
- investment_per_agent,
311
- x="date",
312
- y="sending_amount_usd",
313
- color="sending_chain",
314
- title="Amount of Investment (USD) per Day",
315
- labels={"sending_chain": "Transaction Chain", "sending_amount_usd": "Investment Amount (USD)"},
316
- barmode="stack",
317
- color_discrete_map=color_map
318
- )
319
- fig_investment_agent.update_layout(
320
- xaxis_title=None,
321
- yaxis=dict(
322
- title="Investment Amount (USD)",
323
- tickmode='auto',
324
- nticks=10,
325
- tickformat='.2f' # Show 2 decimal places
326
- ),
327
- xaxis=dict(
328
- tickmode='array',
329
- tickvals=investment_per_agent['date'],
330
- ticktext=investment_per_agent['date'].dt.strftime('%y-%m-%d'),
331
- tickangle=0,
332
- ),
333
- bargap=0.8,
334
- height=700,
335
- )
336
- fig_investment_agent.update_xaxes(tickformat="%Y-%m-%d")
337
-
338
- # Number of agents per week
339
- fig_agents_daily = px.bar(
340
- df_agents_weekly,
341
- x="week",
342
- y="agent_count",
343
- title="Number of Agents Registered WoW",
344
- labels={"week": "Week", "agent_count": "Number of Agents"},
345
- color_discrete_sequence=["purple"]
346
- )
347
- fig_agents_daily.update_layout(
348
- xaxis_title=None,
349
- yaxis=dict(tickmode='linear', tick0=0, dtick=1),
350
- xaxis=dict(
351
- tickmode='array',
352
- tickvals=df_agents_weekly['week'],
353
- ticktext=df_agents_weekly['week'].dt.strftime('%y-%m-%d'),
354
- tickangle=0
355
- ),
356
- bargap=0.8,
357
  height=700,
358
  )
359
 
360
- # Number of agents with transactions per week
361
- fig_agents_with_transactions_daily = px.bar(
362
- df_agents_with_transactions_weekly,
363
- x="week",
364
- y="agent_count_with_transactions",
365
- title="Agents With at Least 1 Transaction WoW",
366
- labels={"week": "Week", "agent_count_with_transactions": "Number of Agents with Transactions"},
367
- color_discrete_sequence=["darkgreen"]
368
- )
369
- fig_agents_with_transactions_daily.update_layout(
370
- xaxis_title=None,
371
- yaxis=dict(tickmode='linear', tick0=0, dtick=1),
372
- xaxis=dict(
373
- tickmode='array',
374
- tickvals=df_agents_with_transactions_weekly['week'],
375
- ticktext=df_agents_with_transactions_weekly['week'].dt.strftime('%y-%m-%d'),
376
- tickangle=0
377
- ),
378
- bargap=0.8,
379
- height=700,
380
- )
381
 
382
- return fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent, fig_agents_daily, fig_agents_with_transactions_daily
383
 
384
  # Gradio interface
385
  def dashboard():
@@ -388,29 +250,11 @@ def dashboard():
388
 
389
  # Fetch and display visualizations
390
  with gr.Tab("Transactions"):
391
- fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent, fig_agents_daily, fig_agents_with_transactions_daily = create_visualizations()
392
  gr.Plot(fig_tx_chain)
393
-
394
- with gr.Tab("Swaps"):
395
- fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent, fig_agents_daily, fig_agents_with_transactions_daily = create_visualizations()
396
- gr.Plot(fig_swaps_chain)
397
-
398
- with gr.Tab("Bridges"):
399
- fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent, fig_agents_daily, fig_agents_with_transactions_daily = create_visualizations()
400
- gr.Plot(fig_bridges_chain)
401
 
402
- with gr.Tab("Investment"):
403
- fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent, fig_agents_daily, fig_agents_with_transactions_daily = create_visualizations()
404
- gr.Plot(fig_investment_agent)
405
-
406
- with gr.Tab("Agents Week Over Week"):
407
- fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent, fig_agents_daily, fig_agents_with_transactions_daily = create_visualizations()
408
- gr.Plot(fig_agents_daily)
409
 
410
- with gr.Tab("Agents with Transactions Week Over Week"):
411
- fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent, fig_agents_daily, fig_agents_with_transactions_daily = create_visualizations()
412
- gr.Plot(fig_agents_with_transactions_daily)
413
-
414
  return demo
415
 
416
  # Launch the dashboard
 
6
  import json
7
  from web3 import Web3
8
 
9
+ # RPC URLs
10
  OPTIMISM_RPC_URL = 'https://opt-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
11
+ BASE_RPC_URL = 'https://base-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
12
+ ETH_RPC_URL = 'https://eth-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
13
 
14
+ # Initialize Web3 instances
15
+ print("Initializing Web3 instances...")
16
+ web3_optimism = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))
17
+ web3_base = Web3(Web3.HTTPProvider(BASE_RPC_URL))
18
+ web3_eth = Web3(Web3.HTTPProvider(ETH_RPC_URL))
19
 
20
+ # Contract addresses for service registries
21
+ contract_address_optimism = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'
22
+ contract_address_base = '0x3C1fF68f5aa342D296d4DEe4Bb1cACCA912D95fE'
23
+ contract_address_eth = '0x48b6af7B12C71f09e2fC8aF4855De4Ff54e775cA'
 
 
24
 
25
+ # Load the ABI from a local JSON file
26
  with open('service_registry_abi.json', 'r') as abi_file:
27
  contract_abi = json.load(abi_file)
28
 
29
+ # Create the contract instances
30
+ service_registry_optimism = web3_optimism.eth.contract(address=contract_address_optimism, abi=contract_abi)
31
+ service_registry_base = web3_base.eth.contract(address=contract_address_base, abi=contract_abi)
32
+ service_registry_eth = web3_eth.eth.contract(address=contract_address_eth, abi=contract_abi)
33
+ print("Service registry contracts loaded.")
34
 
35
+ # Check if connection is successful
36
+ if not web3_optimism.is_connected():
37
+ raise Exception("Failed to connect to the Optimism network.")
38
+ if not web3_base.is_connected():
39
+ raise Exception("Failed to connect to the Base network.")
40
+ if not web3_eth.is_connected():
41
+ raise Exception("Failed to connect to the ETH network.")
42
+ print("Successfully connected to Ethereum, Optimism, and Base networks.")
43
 
44
 
45
+ def fetch_service_safes(web3, registry_contract):
46
+ print("\nFetching service safes...")
47
+ total_services = registry_contract.functions.totalSupply().call()
48
+ print(f"Total services: {total_services}")
49
+ service_safes = set()
50
 
51
  for service_id in range(1, total_services + 1):
52
+ print(f"Processing service ID: {service_id}")
53
+ service = registry_contract.functions.getService(service_id).call()
 
54
  agent_ids = service[-1] # Assuming the last element is the list of agent IDs
55
+ print(f"Agent IDs: {agent_ids}")
56
 
 
57
  if 25 in agent_ids:
58
+ agent_address = registry_contract.functions.getAgentInstances(service_id).call()
59
+ service_safe = service[1]
60
+ print(f"Found agent_address: {agent_address}")
61
+ print(f"Found service safe: {service_safe}")
62
+ service_safes.add(service_safe)
63
+
64
+ print(f"Total service safes found: {len(service_safes)}")
65
+ return service_safes
66
+
67
+ # Fetch service safes for each network
68
+ service_safes_optimism = fetch_service_safes(web3_optimism, service_registry_optimism)
69
+ service_safes_base = fetch_service_safes(web3_base, service_registry_base)
70
+ service_safes_eth = fetch_service_safes(web3_eth, service_registry_eth)
71
+ service_safes_eth = {safe for safe in service_safes_eth if safe.lower() != '0x0000000000000000000000000000000000000000'}
72
+
73
+ def get_block_range_for_date(chain_id, date_str, api_key, base_url):
74
+ """Get the block range for a specific date."""
75
+ target_date = datetime.strptime(date_str, "%Y-%m-%d")
76
+ start_of_day = datetime.combine(target_date, datetime.min.time())
77
+ end_of_day = datetime.combine(target_date, datetime.max.time())
78
+
79
+ start_timestamp = int(start_of_day.timestamp())
80
+ end_timestamp = int(end_of_day.timestamp())
81
+
82
+ # Get start block
83
+ start_response = requests.get(
84
+ f"{base_url}?module=block&action=getblocknobytime&timestamp={start_timestamp}&closest=before&apikey={api_key}"
85
+ )
86
+ if start_response.status_code == 200:
87
+ start_data = start_response.json()
88
+ start_block = start_data.get('result')
89
+ else:
90
+ print(f"Error fetching start block for {date_str} on chain {chain_id}")
91
+ return None, None
92
+
93
+ if start_block is None:
94
+ print(f"No start block found for chain {chain_id} on {date_str}")
95
+ return None, None
96
+ print(f"Start block for chain {chain_id} on {date_str}: {start_block}")
97
+
98
+ # Get end block
99
+ end_response = requests.get(
100
+ f"{base_url}?module=block&action=getblocknobytime&timestamp={end_timestamp}&closest=before&apikey={api_key}"
101
+ )
102
+ if end_response.status_code == 200:
103
+ end_data = end_response.json()
104
+ end_block = end_data.get('result')
105
+ else:
106
+ print(f"Error fetching end block for {date_str} on chain {chain_id}")
107
+ return None, None
108
+
109
+ if end_block is None:
110
+ print(f"No end block found for chain {chain_id} on {date_str}")
111
+ return None, None
112
+ print(f"End block for chain {chain_id} on {date_str}: {end_block}")
113
+
114
+ return start_block, end_block
115
+
116
+ def get_transactions(api_keys, wallet_address, chain_name, start_block, end_block):
117
+ """Retrieve transactions for the given wallet address, chain, and block range using the Etherscan or similar API."""
118
+ base_url = {
119
+ 'optimism': "https://api-optimistic.etherscan.io/api",
120
+ 'base': "https://api.basescan.org/api",
121
+ 'ethereum': "https://api.etherscan.io/api"
122
+ }.get(chain_name)
123
+
124
+ if not base_url:
125
+ print(f"Invalid chain name: {chain_name}")
126
+ return []
127
+
128
+ params = {
129
+ 'module': 'account',
130
+ 'action': 'txlist',
131
+ 'address': wallet_address,
132
+ 'startblock': start_block,
133
+ 'endblock': end_block,
134
+ 'sort': 'asc',
135
+ 'apikey': api_keys.get(chain_name)
136
+ }
137
 
138
+ response = requests.get(base_url, params=params)
139
+ data = response.json()
140
+ if data['status'] != '1':
141
+ print(f"Error: {data['message']}")
142
+ return []
143
+
144
+ valid_transactions = [tx for tx in data['result'] if tx['isError'] == '0']
145
 
146
+ return valid_transactions
147
+
148
+ def date_range(start_date, end_date):
149
+ """Generates a range of dates from start_date to end_date inclusive."""
150
+ start_dt = datetime.strptime(start_date, "%Y-%m-%d")
151
+ end_dt = datetime.strptime(end_date, "%Y-%m-%d")
152
+ delta = timedelta(days=1)
153
+ current_dt = start_dt
154
+ while current_dt <= end_dt:
155
+ yield current_dt.strftime("%Y-%m-%d")
156
+ current_dt += delta
157
+
158
+ def fetch_transactions():
159
+ # User inputs
160
+ api_keys = {
161
+ 'optimism': 'XQ72JA5XZ51QC7TG1W295AAIF4KTV92K1K',
162
+ 'base': '4BFQMVW1QUKEPVDA4VW711CF4462682CY8',
163
+ 'ethereum': '3GRYJGX55W3QWCEKGREF4H53AFHCAIVVR7'
164
  }
165
 
166
+ base_urls = {
167
+ 10: "https://api-optimistic.etherscan.io/api",
168
+ 8453: "https://api.basescan.org/api",
169
+ 1: "https://api.etherscan.io/api"
170
+ }
171
 
172
+ start_date = '2024-09-19' # Starting date
173
+ current_date = datetime.now().strftime('%Y-%m-%d') # Till present date
 
174
 
175
+ chains = {
176
+ 10: ('optimism', service_safes_optimism), # Optimism chain ID and service safes
177
+ 8453: ('base', service_safes_base), # Base chain ID and service safes
178
+ 1: ('ethereum', service_safes_eth) # Ethereum mainnet chain ID and service safes
179
  }
180
 
181
+ all_transactions = [] # List to hold all transactions
182
+
183
+ for chain_id, (chain_name, service_safes) in chains.items():
184
+ base_url = base_urls[chain_id]
185
+ api_key = api_keys[chain_name]
186
+
187
+ for safe_address in service_safes:
188
+ print(f"\nProcessing {chain_name.capitalize()} for safe address {safe_address}...")
189
+ for single_date in date_range(start_date, current_date):
190
+ start_block, end_block = get_block_range_for_date(chain_id, single_date, api_key, base_url)
191
+ if start_block is None or end_block is None:
192
+ print(f"Skipping date {single_date} for chain {chain_name} due to missing block data.")
193
+ continue
194
+
195
+ print(f"Start Block: {start_block}, End Block: {end_block} for date {single_date}")
196
+
197
+ transactions = get_transactions(api_keys, safe_address, chain_name, start_block, end_block)
198
+
199
+ if transactions:
200
+ print(f"Found {len(transactions)} transactions on {single_date} for {chain_name.capitalize()} safe address {safe_address}:")
201
+ for tx in transactions:
202
+ tx_time = datetime.fromtimestamp(int(tx['timeStamp']))
203
+ all_transactions.append({
204
+ 'chain': chain_name,
205
+ 'safe_address': safe_address,
206
+ 'date': single_date,
207
+ 'transaction_hash': tx['hash'],
208
+ 'timestamp': tx_time,
209
+ 'from': tx['from'],
210
+ 'to': tx['to'],
211
+ 'value_eth': int(tx['value']) / 1e18 # Convert value to ETH
212
+ })
213
+ else:
214
+ print(f"No transactions found for safe address {safe_address} on {single_date} on {chain_name.capitalize()}.")
215
+
216
+ # Convert the collected transactions into a DataFrame
217
+ df_transactions_new = pd.DataFrame(all_transactions)
218
+ return df_transactions_new
219
+
220
+ def create_transaction_visualizations():
221
+ df_transactions_new = fetch_transactions()
222
+ df_transactions_new.to_csv('daily_transactions_new.csv', index=False)
223
+
224
+ df_transactions_new['timestamp'] = pd.to_datetime(df_transactions_new['timestamp'])
225
+
226
+ # Group by date and chain, count transactions
227
+ daily_counts = df_transactions_new.groupby([df_transactions_new['timestamp'].dt.date, 'chain']).size().unstack(fill_value=0)
228
+
229
+ # Set up the plot
230
  fig_tx_chain = px.bar(
231
+ daily_counts,
232
  barmode="stack",
233
+ title="Chain Daily Activity: Transactions"
234
  )
235
  fig_tx_chain.update_layout(
236
+ xaxis_title="Date",
237
+ yaxis_title="Daily Transaction Nr",
238
+ legend_title="Transaction Chain",
239
+ xaxis_tickformat="%Y-%m-%d",
240
  height=700,
241
  )
242
 
243
+ return fig_tx_chain
244
 
 
245
 
246
  # Gradio interface
247
  def dashboard():
 
250
 
251
  # Fetch and display visualizations
252
  with gr.Tab("Transactions"):
253
+ fig_tx_chain = create_transaction_visualizations()
254
  gr.Plot(fig_tx_chain)
255
 
256
+ # Add more tabs as needed...
257
 
258
  return demo
259
 
260
  # Launch the dashboard