gxurxv committed
Commit b8d3277 · verified · 1 Parent(s): d7be0ae

update app.py for mode

Files changed (1)
  1. app.py +105 -253
app.py CHANGED
Old version (removed lines are marked with -):

@@ -6,28 +6,43 @@ import plotly.express as px
  from datetime import datetime, timedelta
  import json
  from web3 import Web3
  from app_trans_new import create_transcation_visualizations,create_active_agents_visualizations
  from app_value_locked import fetch_daily_value_locked
- import os

  OPTIMISM_RPC_URL = os.getenv('OPTIMISM_RPC_URL')
- print('OPTIMISM_RPC_URL',OPTIMISM_RPC_URL)
- # Initialize a Web3 instance
- web3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))

- # Check if connection is successful
- if not web3.is_connected():
-     raise Exception("Failed to connect to the Optimism network.")

- # Contract address
- contract_address = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'

  # Load the ABI from the provided JSON file
  with open('./contracts/service_registry_abi.json', 'r') as abi_file:
      contract_abi = json.load(abi_file)

- # Now you can create the contract
- service_registry = web3.eth.contract(address=contract_address, abi=contract_abi)

  def get_transfers(integrator: str, wallet: str) -> str:
      url = f"https://li.quest/v1/analytics/transfers?&wallet={wallet}&fromTimestamp=1726165800"
@@ -35,91 +50,48 @@ def get_transfers(integrator: str, wallet: str) -> str:
      response = requests.get(url, headers=headers)
      return response.json()

- def load_activity_checker_contract(w3, staking_token_address):
-     """
-     Loads the Staking Token and Activity Checker contracts.
-
-     :param w3: Web3 instance
-     :param staking_token_address: Address of the staking token contract
-     :return: Tuple of (Staking Token contract instance, Activity Checker contract instance)
-     """
-     try:
-         # Load the ABI file for the Staking Token contract
-         with open('./contracts/StakingToken.json', "r", encoding="utf-8") as file:
-             staking_token_data = json.load(file)
-
-         staking_token_abi = staking_token_data.get("abi", [])
-
-         # Create the Staking Token contract instance
-         staking_token_contract = w3.eth.contract(address=staking_token_address, abi=staking_token_abi)
-
-         # Get the activity checker contract address from staking_token_contract
-         activity_checker_address = staking_token_contract.functions.activityChecker().call()
-
-         # Load the ABI file for the Activity Checker contract
-         with open('./contracts/StakingActivityChecker.json', "r", encoding="utf-8") as file:
-             activity_checker_data = json.load(file)
-
-         activity_checker_abi = activity_checker_data.get("abi", [])
-
-         # Create the Activity Checker contract instance
-         activity_checker_contract = w3.eth.contract(address=activity_checker_address, abi=activity_checker_abi)
-
-         return staking_token_contract, activity_checker_contract
-
-     except Exception as e:
-         print(f"An error occurred while loading the contracts: {e}")
-         raise
-
-
  def fetch_and_aggregate_transactions():
-     total_services = service_registry.functions.totalSupply().call()
      aggregated_transactions = []
      daily_agent_counts = {}
-     daily_agents_with_transactions = {}
-
-     _staking_token_contract, activity_checker_contract = load_activity_checker_contract(web3, '0x88996bbdE7f982D93214881756840cE2c77C4992')
-
-     for service_id in range(1, total_services + 1):
-         service = service_registry.functions.getService(service_id).call()
-         # Extract the list of agent IDs from the service data
-         agent_ids = service[-1] # Assuming the last element is the list of agent IDs
-
-         # Check if 40 is in the list of agent IDs
-         if 40 in agent_ids or 25 in agent_ids:
-             agent_address = service_registry.functions.getAgentInstances(service_id).call()[1][0]
-             response_transfers = get_transfers("valory", agent_address)
-             transfers = response_transfers.get("transfers", [])
-             if isinstance(transfers, list):
-                 aggregated_transactions.extend(transfers)
-
-             # Track the daily number of agents
-             creation_event = service_registry.events.CreateService.create_filter(from_block=0, argument_filters={'serviceId': service_id}).get_all_entries()
-             if creation_event:
-                 block_number = creation_event[0]['blockNumber']
-                 block = web3.eth.get_block(block_number)
-                 creation_timestamp = datetime.fromtimestamp(block['timestamp'])
-                 date_str = creation_timestamp.strftime('%Y-%m-%d')
-                 print("date_str",date_str)
-                 if date_str not in daily_agent_counts:
-                     daily_agent_counts[date_str] = set()
-                 if date_str not in daily_agents_with_transactions:
-                     daily_agents_with_transactions[date_str] = set()
-
-                 service_safe = service[1]
-                 print("agent_address",agent_address,"service_safe",service_safe)
-                 multisig_nonces = activity_checker_contract.functions.getMultisigNonces(service_safe).call()[0]
-                 if multisig_nonces > 0:
-                     daily_agents_with_transactions[date_str].add(agent_address)
-                 daily_agent_counts[date_str].add(agent_address)
-     # Convert set to count
      daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
-     daily_agents_with_transactions = {date: len(agents) for date, agents in daily_agents_with_transactions.items()}
-     return aggregated_transactions, daily_agent_counts, daily_agents_with_transactions

  # Function to parse the transaction data and prepare it for visualization
  def process_transactions_and_agents(data):
-     transactions, daily_agent_counts, daily_agents_with_transactions = data

      # Convert the data into a pandas DataFrame for easy manipulation
      rows = []
@@ -154,99 +126,35 @@ def process_transactions_and_agents(data):
          })

      df_transactions = pd.DataFrame(rows)
-     breakpoint()
      df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
-     df_agents_with_transactions = pd.DataFrame(list(daily_agents_with_transactions.items()), columns=['date', 'agent_count_with_transactions'])
-
-     # Convert the date column to datetime
      df_agents['date'] = pd.to_datetime(df_agents['date'])
-     df_agents_with_transactions['date'] = pd.to_datetime(df_agents_with_transactions['date'])
-
-     # Convert to week periods
      df_agents['week'] = df_agents['date'].dt.to_period('W').apply(lambda r: r.start_time)
-     df_agents_with_transactions['week'] = df_agents_with_transactions['date'].dt.to_period('W').apply(lambda r: r.start_time)

-     # Group by week
      df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()
-     df_agents_with_transactions_weekly = df_agents_with_transactions[['week', 'agent_count_with_transactions']].groupby('week').sum().reset_index()

-     return df_transactions, df_agents_weekly, df_agents_with_transactions_weekly, df_agents_with_transactions

  # Function to create visualizations based on the metrics
  def create_visualizations():
      transactions_data = fetch_and_aggregate_transactions()
-     df_transactions, df_agents_weekly, df_agents_with_transactions_weekly, df_agents_with_transactions = process_transactions_and_agents(transactions_data)
-     # Map chain IDs to chain names
-
-     # Fetch daily value locked data
-     df_tvl = fetch_daily_value_locked()
-
-     # Calculate total value locked per chain per day
-     df_tvl["total_value_locked_usd"] = df_tvl["amount0_usd"] + df_tvl["amount1_usd"]
-     df_tvl_daily = df_tvl.groupby(["date", "chain_name"])["total_value_locked_usd"].sum().reset_index()
-     df_tvl_daily['date'] = pd.to_datetime(df_tvl_daily['date'])
-
-     # Filter out dates with zero total value locked
-     df_tvl_daily = df_tvl_daily[df_tvl_daily["total_value_locked_usd"] > 0]
-     chain_name_map = {
-         "optimism": "Optimism",
-         "base": "Base",
-         "ethereum": "Ethereum"
-     }
-     df_tvl_daily["chain_name"] = df_tvl_daily["chain_name"].map(chain_name_map)
-
-     # Plot total value locked
-     fig_tvl = px.bar(
-         df_tvl_daily,
-         x="date",
-         y="total_value_locked_usd",
-         color="chain_name",
-         opacity=0.7,
-         title="Total Volume Invested in Pools in Different Chains Daily",
-         labels={"date": "Date","chain_name": "Transaction Chain", "total_value_locked_usd": "Total Volume Invested (USD)"},
-         barmode='stack',
-         color_discrete_map={
-             "Optimism": "blue",
-             "Base": "purple",
-             "Ethereum": "darkgreen"
-         }
-     )
-     fig_tvl.update_layout(
-         xaxis_title=None,
-         yaxis=dict(tickmode='linear', tick0=0, dtick=1),
-         xaxis=dict(
-             tickmode='array',
-             tickvals=df_tvl_daily['date'],
-             ticktext=df_tvl_daily['date'].dt.strftime('%b %d'),
-             tickangle=-45,
-         ),
-         bargap=0.6, # Increase gap between bar groups (0-1)
-         bargroupgap=0.1, # Decrease gap between bars in a group (0-1)
-         height=600, # Specify width to prevent bars from being too wide
-         margin=dict(l=50, r=50, t=50, b=50), # Add margins
-         showlegend=True,
-         template='plotly_white'
-     )
-     fig_tvl.update_xaxes(tickformat="%b %d")
-

      chain_name_map = {
          10: "Optimism",
          8453: "Base",
          1: "Ethereum",
      }
      df_transactions["sending_chain"] = df_transactions["sending_chain"].map(chain_name_map)
      df_transactions["receiving_chain"] = df_transactions["receiving_chain"].map(chain_name_map)

-     # Ensure that chain IDs are strings for consistent grouping
      df_transactions["sending_chain"] = df_transactions["sending_chain"].astype(str)
      df_transactions["receiving_chain"] = df_transactions["receiving_chain"].astype(str)
      df_transactions['date'] = pd.to_datetime(df_transactions['date'])

-     # Identify swap transactions
-     df_transactions["is_swap"] = df_transactions.apply(lambda x: x["sending_token_symbol"] != x["receiving_token_symbol"], axis=1)
-
-     # Total swaps per chain per day
      swaps_per_chain = df_transactions[df_transactions["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
      fig_swaps_chain = px.bar(
          swaps_per_chain,
@@ -260,7 +168,8 @@ def create_visualizations():
          color_discrete_map={
              "Optimism": "blue",
              "Ethereum": "darkgreen",
-             "Base": "purple"
          }
      )
      fig_swaps_chain.update_layout(
@@ -269,14 +178,14 @@ def create_visualizations():
          yaxis=dict(tickmode='linear', tick0=0, dtick=1),
          xaxis=dict(
              tickmode='array',
-             tickvals=[d for d in swaps_per_chain['date'] if d.weekday() == 0], # Show only Mondays
              ticktext=[d.strftime('%m-%d') for d in swaps_per_chain['date'] if d.weekday() == 0],
              tickangle=-45,
          ),
-         bargap=0.6, # Increase gap between bar groups (0-1)
-         bargroupgap=0.1, # Decrease gap between bars in a group (0-1)
-         height=600, # Specify width to prevent bars from being too wide
-         margin=dict(l=50, r=50, t=50, b=50), # Add margins
          showlegend=True,
          legend=dict(
              yanchor="top",
@@ -288,11 +197,8 @@ def create_visualizations():
      )
      fig_swaps_chain.update_xaxes(tickformat="%m-%d")

-     # Identify bridge transactions
-     # Identify bridge transactions
      df_transactions["is_bridge"] = df_transactions.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)

-     # Total bridges per chain per day
      bridges_per_chain = df_transactions[df_transactions["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
      fig_bridges_chain = px.bar(
          bridges_per_chain,
@@ -306,7 +212,8 @@ def create_visualizations():
          color_discrete_map={
              "Optimism": "blue",
              "Ethereum": "darkgreen",
-             "Base": "purple"
          }
      )
      fig_bridges_chain.update_layout(
@@ -315,14 +222,14 @@ def create_visualizations():
          yaxis=dict(tickmode='linear', tick0=0, dtick=1),
          xaxis=dict(
              tickmode='array',
-             tickvals=[d for d in bridges_per_chain['date'] if d.weekday() == 0], # Show only Mondays
              ticktext=[d.strftime('%m-%d') for d in bridges_per_chain['date'] if d.weekday() == 0],
              tickangle=-45,
          ),
-         bargap=0.6, # Increase gap between bar groups (0-1)
-         bargroupgap=0.1, # Decrease gap between bars in a group (0-1)
-         height=600, # Specify width to prevent bars from being too wide
-         margin=dict(l=50, r=50, t=50, b=50), # Add margins
          showlegend=True,
          legend=dict(
              yanchor="top",
@@ -333,46 +240,36 @@ def create_visualizations():
          template='plotly_white'
      )
      fig_bridges_chain.update_xaxes(tickformat="%m-%d")

-     # Nr of agents registered daily and weekly
-     # Convert 'date' column to datetime
-     df_agents_with_transactions['date'] = pd.to_datetime(df_agents_with_transactions['date'])

-     # Calculate daily number of agents registered
-     daily_agents_df = df_agents_with_transactions.groupby('date').size().reset_index(name='daily_agent_count')
-
-     # Check for October 2, 2024 and update the value
-     daily_agents_df.loc[daily_agents_df['date'] == '2024-10-02', 'daily_agent_count'] = 2
-
-     # Calculate cumulative number of agents registered within the week up to each day
-     df_agents_with_transactions['week_start'] = df_agents_with_transactions['date'].dt.to_period("W").apply(lambda r: r.start_time)
-     cumulative_agents_df = df_agents_with_transactions.groupby(['week_start', 'date']).size().groupby(level=0).cumsum().reset_index(name='weekly_agent_count')

-     # Check for October 2, 2024 and update the value
-     cumulative_agents_df.loc[cumulative_agents_df['date'] == '2024-10-02', 'weekly_agent_count'] = 2
-
-     # Combine the data to ensure both dataframes align for plotting
-     combined_df = pd.merge(daily_agents_df, cumulative_agents_df, on='date', how='left')
-
-     # Create the bar chart with side-by-side bars
      fig_agents_registered = go.Figure(data=[
          go.Bar(
              name='Daily nr of Registered Agents',
-             x=combined_df['date'],
-             y=combined_df['daily_agent_count'],
              opacity=0.7,
              marker_color='blue'
          ),
          go.Bar(
              name='Total Weekly Nr of Registered Agents',
-             x=combined_df['date'],
-             y=combined_df['weekly_agent_count'],
              opacity=0.7,
              marker_color='purple'
          )
      ])

-     # Update layout to group bars side by side for each day
      fig_agents_registered.update_layout(
          xaxis_title='Date',
          yaxis_title='Number of Agents',
@@ -381,55 +278,18 @@ def create_visualizations():
          yaxis=dict(tickmode='linear', tick0=0, dtick=1),
          xaxis=dict(
              tickmode='array',
-             tickvals=combined_df['date'],
-             ticktext=[d.strftime("%b %d") for d in combined_df['date']],
              tickangle=-45
          ),
-         bargap=0.6, # Increase gap between bar groups (0-1)
-         height=600, # Specify width to prevent bars from being too wide
-         margin=dict(l=50, r=50, t=50, b=50), # Add margins
          showlegend=True,
          template='plotly_white'
      )

-     # Calculate weekly average daily active agents
-     df_agents_with_transactions['day_of_week'] = df_agents_with_transactions['date'].dt.dayofweek
-     df_agents_with_transactions_weekly_avg = df_agents_with_transactions.groupby(['week', 'day_of_week'])['agent_count_with_transactions'].mean().reset_index()
-     df_agents_with_transactions_weekly_avg = df_agents_with_transactions_weekly_avg.groupby('week')['agent_count_with_transactions'].mean().reset_index()
-     # Number of agents with transactions per week
-     fig_agents_with_transactions_daily = px.bar(
-         df_agents_with_transactions_weekly,
-         x="week",
-         opacity=0.7,
-         y="agent_count_with_transactions",
-         title="Daily Active Agents: Weekly Average Nr of agents with at least 1 transaction daily",
-         labels={"week": "Week of", "agent_count_with_transactions": "Number of Agents with Transactions"},
-         color_discrete_sequence=["darkgreen"]
-     )
-     fig_agents_with_transactions_daily.update_layout(
-         title=dict(
-             x=0.5,y=0.95,xanchor='center',yanchor='top'), # Adjust vertical position and Center the title
-         yaxis=dict(tickmode='linear', tick0=0, dtick=1),
-         xaxis=dict(
-             tickmode='array',
-             tickvals=df_agents_with_transactions_weekly_avg['week'],
-             ticktext=df_agents_with_transactions_weekly_avg['week'].dt.strftime('%b %d'),
-             tickangle=0
-         ),
-         bargap=0.6, # Increase gap between bar groups (0-1)
-         bargroupgap=0.1, # Decrease gap between bars in a group (0-1)
-         height=600, # Specify width to prevent bars from being too wide
-         margin=dict(l=50, r=50, t=50, b=50), # Add margins
-         showlegend=True,
-         legend=dict(
-             yanchor="top",
-             y=0.99,
-             xanchor="right",
-             x=0.99
-         )
-     )
-
-     return fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_agents_with_transactions_daily,fig_tvl

  # Gradio interface
  def dashboard():
@@ -439,28 +299,20 @@ def dashboard():
              fig_tx_chain = create_transcation_visualizations()
              gr.Plot(fig_tx_chain)

-         fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_agents_with_transactions_daily = create_visualizations()
-         #Fetch and display visualizations
          with gr.Tab("Swaps Daily"):
              gr.Plot(fig_swaps_chain)

          with gr.Tab("Bridges Daily"):
-             #fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily,fig_tvl = create_visualizations()
              gr.Plot(fig_bridges_chain)

          with gr.Tab("Nr of Agents Registered"):
-             #fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily,fig_tvl = create_visualizations()
              gr.Plot(fig_agents_registered)
-
          with gr.Tab("DAA"):
              fig_agents_with_transactions_daily = create_active_agents_visualizations()
-             #fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily,fig_tvl = create_visualizations()
-             gr.Plot(fig_agents_with_transactions_daily)
-
-         with gr.Tab("Total Value Locked"):
-             #fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily, fig_tvl,fig_tvl = create_visualizations()
-             gr.Plot(fig_tvl)
-
      return demo

  # Launch the dashboard
New version (added lines are marked with +):

  from datetime import datetime, timedelta
  import json
  from web3 import Web3
+ import os
  from app_trans_new import create_transcation_visualizations,create_active_agents_visualizations
  from app_value_locked import fetch_daily_value_locked
+ # Load environment variables from .env file

+ # RPC URLs
  OPTIMISM_RPC_URL = os.getenv('OPTIMISM_RPC_URL')
+ MODE_RPC_URL = os.getenv('MODE_RPC_URL')

+ # Initialize Web3 instances
+ web3_instances = {
+     'optimism': Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL)),
+     'mode': Web3(Web3.HTTPProvider(MODE_RPC_URL))
+ }

+ # Contract addresses for service registries
+ contract_addresses = {
+     'optimism': '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44',
+     'mode': '0x3C1fF68f5aa342D296d4DEe4Bb1cACCA912D95fE'
+ }

  # Load the ABI from the provided JSON file
  with open('./contracts/service_registry_abi.json', 'r') as abi_file:
      contract_abi = json.load(abi_file)

+ # Create the contract instances
+ service_registries = {
+     chain_name: web3.eth.contract(address=contract_addresses[chain_name], abi=contract_abi)
+     for chain_name, web3 in web3_instances.items()
+ }
+
+ # Check if connections are successful
+ for chain_name, web3_instance in web3_instances.items():
+     if not web3_instance.is_connected():
+         raise Exception(f"Failed to connect to the {chain_name.capitalize()} network.")
+     else:
+         print(f"Successfully connected to the {chain_name.capitalize()} network.")

  def get_transfers(integrator: str, wallet: str) -> str:
      url = f"https://li.quest/v1/analytics/transfers?&wallet={wallet}&fromTimestamp=1726165800"
      response = requests.get(url, headers=headers)
      return response.json()
  def fetch_and_aggregate_transactions():
      aggregated_transactions = []
      daily_agent_counts = {}
+     seen_agents = set()
+
+     for chain_name, service_registry in service_registries.items():
+         web3 = web3_instances[chain_name]
+         total_services = service_registry.functions.totalSupply().call()
+         for service_id in range(1, total_services + 1):
+             service = service_registry.functions.getService(service_id).call()
+             agent_ids = service[-1]
+
+             if 40 in agent_ids or 25 in agent_ids:
+                 agent_address = service_registry.functions.getAgentInstances(service_id).call()[1][0]
+                 response_transfers = get_transfers("valory", agent_address)
+                 transfers = response_transfers.get("transfers", [])
+
+                 if isinstance(transfers, list):
+                     aggregated_transactions.extend(transfers)
+
+                 # Track the daily number of agents
+                 current_date = ""
+                 creation_event = service_registry.events.CreateService.create_filter(from_block=0, argument_filters={'serviceId': service_id}).get_all_entries()
+                 if creation_event:
+                     block_number = creation_event[0]['blockNumber']
+                     block = web3.eth.get_block(block_number)
+                     creation_timestamp = datetime.fromtimestamp(block['timestamp'])
+                     date_str = creation_timestamp.strftime('%Y-%m-%d')
+                     current_date = date_str
+
+                     # Ensure each agent is only counted once based on first registered date
+                     if agent_address not in seen_agents:
+                         seen_agents.add(agent_address)
+                         if date_str not in daily_agent_counts:
+                             daily_agent_counts[date_str] = set()
+                         daily_agent_counts[date_str].add(agent_address)
      daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
+     return aggregated_transactions, daily_agent_counts

  # Function to parse the transaction data and prepare it for visualization
  def process_transactions_and_agents(data):
+     transactions, daily_agent_counts = data

      # Convert the data into a pandas DataFrame for easy manipulation
      rows = []
          })

      df_transactions = pd.DataFrame(rows)
+     df_transactions = df_transactions.drop_duplicates()
      df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
      df_agents['date'] = pd.to_datetime(df_agents['date'])
      df_agents['week'] = df_agents['date'].dt.to_period('W').apply(lambda r: r.start_time)

      df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()

+     return df_transactions, df_agents, df_agents_weekly

  # Function to create visualizations based on the metrics
  def create_visualizations():
      transactions_data = fetch_and_aggregate_transactions()
+     df_transactions, df_agents, df_agents_weekly = process_transactions_and_agents(transactions_data)

      chain_name_map = {
          10: "Optimism",
          8453: "Base",
          1: "Ethereum",
+         34443: "Mode"
      }
+
      df_transactions["sending_chain"] = df_transactions["sending_chain"].map(chain_name_map)
      df_transactions["receiving_chain"] = df_transactions["receiving_chain"].map(chain_name_map)

      df_transactions["sending_chain"] = df_transactions["sending_chain"].astype(str)
      df_transactions["receiving_chain"] = df_transactions["receiving_chain"].astype(str)
      df_transactions['date'] = pd.to_datetime(df_transactions['date'])
+     df_transactions["is_swap"] = df_transactions.apply(lambda x: x["sending_chain"] == x["receiving_chain"], axis=1)

      swaps_per_chain = df_transactions[df_transactions["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
      fig_swaps_chain = px.bar(
          swaps_per_chain,
          color_discrete_map={
              "Optimism": "blue",
              "Ethereum": "darkgreen",
+             "Base": "purple",
+             "Mode": "orange"
          }
      )
      fig_swaps_chain.update_layout(
          yaxis=dict(tickmode='linear', tick0=0, dtick=1),
          xaxis=dict(
              tickmode='array',
+             tickvals=[d for d in swaps_per_chain['date'] if d.weekday() == 0],
              ticktext=[d.strftime('%m-%d') for d in swaps_per_chain['date'] if d.weekday() == 0],
              tickangle=-45,
          ),
+         bargap=0.6,
+         bargroupgap=0.1,
+         height=600,
+         margin=dict(l=50, r=50, t=50, b=50),
          showlegend=True,
          legend=dict(
              yanchor="top",
      )
      fig_swaps_chain.update_xaxes(tickformat="%m-%d")

      df_transactions["is_bridge"] = df_transactions.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)

      bridges_per_chain = df_transactions[df_transactions["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
      fig_bridges_chain = px.bar(
          bridges_per_chain,
          color_discrete_map={
              "Optimism": "blue",
              "Ethereum": "darkgreen",
+             "Base": "purple",
+             "Mode": "orange"
          }
      )
      fig_bridges_chain.update_layout(
          yaxis=dict(tickmode='linear', tick0=0, dtick=1),
          xaxis=dict(
              tickmode='array',
+             tickvals=[d for d in bridges_per_chain['date'] if d.weekday() == 0],
              ticktext=[d.strftime('%m-%d') for d in bridges_per_chain['date'] if d.weekday() == 0],
              tickangle=-45,
          ),
+         bargap=0.6,
+         bargroupgap=0.1,
+         height=600,
+         margin=dict(l=50, r=50, t=50, b=50),
          showlegend=True,
          legend=dict(
              yanchor="top",
          template='plotly_white'
      )
      fig_bridges_chain.update_xaxes(tickformat="%m-%d")
+     df_agents['date'] = pd.to_datetime(df_agents['date'])

+     daily_agents_df = df_agents.groupby('date').agg({'agent_count': 'sum'}).reset_index()
+     daily_agents_df.rename(columns={'agent_count': 'daily_agent_count'}, inplace=True)
+     weekly_agents_df = df_agents.groupby('week').agg({'agent_count': 'sum'}).reset_index()
+     weekly_agents_df.rename(columns={'agent_count': 'weekly_agent_count'}, inplace=True)

+     merged_df = pd.merge(daily_agents_df, df_agents[['date', 'week']], on='date', how='left')
+     weekly_merged_df = pd.merge(merged_df, weekly_agents_df, on='week', how='left')

+     adjustment_date = pd.to_datetime('2024-11-15')
+     weekly_merged_df.loc[weekly_merged_df['date'] == adjustment_date, 'daily_agent_count'] -= 1
+     weekly_merged_df.loc[weekly_merged_df['date'] == adjustment_date, 'weekly_agent_count'] -= 1
      fig_agents_registered = go.Figure(data=[
          go.Bar(
              name='Daily nr of Registered Agents',
+             x=weekly_merged_df['date'],
+             y=weekly_merged_df['daily_agent_count'],
              opacity=0.7,
              marker_color='blue'
          ),
          go.Bar(
              name='Total Weekly Nr of Registered Agents',
+             x=weekly_merged_df['date'],
+             y=weekly_merged_df['weekly_agent_count'],
              opacity=0.7,
              marker_color='purple'
          )
      ])

      fig_agents_registered.update_layout(
          xaxis_title='Date',
          yaxis_title='Number of Agents',
          yaxis=dict(tickmode='linear', tick0=0, dtick=1),
          xaxis=dict(
              tickmode='array',
+             tickvals=weekly_merged_df['date'],
+             ticktext=[d.strftime("%b %d") for d in weekly_merged_df['date']],
              tickangle=-45
          ),
+         bargap=0.6,
+         height=600,
+         margin=dict(l=50, r=50, t=50, b=50),
          showlegend=True,
          template='plotly_white'
      )

+     return fig_swaps_chain, fig_bridges_chain, fig_agents_registered

  # Gradio interface
  def dashboard():
              fig_tx_chain = create_transcation_visualizations()
              gr.Plot(fig_tx_chain)

+         fig_swaps_chain, fig_bridges_chain, fig_agents_registered = create_visualizations()
          with gr.Tab("Swaps Daily"):
              gr.Plot(fig_swaps_chain)

          with gr.Tab("Bridges Daily"):
              gr.Plot(fig_bridges_chain)

          with gr.Tab("Nr of Agents Registered"):
              gr.Plot(fig_agents_registered)
+
          with gr.Tab("DAA"):
              fig_agents_with_transactions_daily = create_active_agents_visualizations()
+             gr.Plot(fig_agents_with_transactions_daily)
+
      return demo

  # Launch the dashboard
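
For readers who want to sanity-check the multi-chain setup this commit introduces, the sketch below mirrors the new registry wiring in isolation. It assumes OPTIMISM_RPC_URL and MODE_RPC_URL are exported in the environment and that ./contracts/service_registry_abi.json is available, as in the diff above; the totalSupply() read at the end is only an illustrative smoke test and is not part of the commit.

# Minimal sketch (not part of the commit): exercise the multi-chain
# service-registry setup that the new app.py relies on.
import json
import os

from web3 import Web3

# Assumed to be exported in the environment, as in the diff.
RPC_URLS = {
    'optimism': os.getenv('OPTIMISM_RPC_URL'),
    'mode': os.getenv('MODE_RPC_URL'),
}

# Registry addresses taken from the diff above.
CONTRACT_ADDRESSES = {
    'optimism': '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44',
    'mode': '0x3C1fF68f5aa342D296d4DEe4Bb1cACCA912D95fE',
}

with open('./contracts/service_registry_abi.json', 'r') as abi_file:
    contract_abi = json.load(abi_file)

service_registries = {}
for chain_name, rpc_url in RPC_URLS.items():
    w3 = Web3(Web3.HTTPProvider(rpc_url))
    if not w3.is_connected():
        raise Exception(f"Failed to connect to the {chain_name.capitalize()} network.")
    service_registries[chain_name] = w3.eth.contract(
        address=CONTRACT_ADDRESSES[chain_name], abi=contract_abi
    )
    # Illustrative smoke test: read the number of registered services per chain.
    print(chain_name, service_registries[chain_name].functions.totalSupply().call())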