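# Gradio dashboard for Valory agent activity: reads the Optimism and Mode service
# registries, pulls LI.FI transfer history for each relevant agent, and renders daily
# swap, bridge, registered-agent, and total-value-locked charts.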
import requests
import pandas as pd
import gradio as gr
import plotly.graph_objects as go
import plotly.express as px
from datetime import datetime, timedelta
import json
from web3 import Web3
import os
from app_trans_new import create_transcation_visualizations, create_active_agents_visualizations
# Load environment variables from .env file (requires python-dotenv)
from dotenv import load_dotenv
load_dotenv()
# RPC URLs
OPTIMISM_RPC_URL = os.getenv('OPTIMISM_RPC_URL')
MODE_RPC_URL = os.getenv('MODE_RPC_URL')
# Initialize Web3 instances
web3_instances = {
'optimism': Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL)),
'mode': Web3(Web3.HTTPProvider(MODE_RPC_URL))
}
# Contract addresses for service registries
contract_addresses = {
'optimism': '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44',
'mode': '0x3C1fF68f5aa342D296d4DEe4Bb1cACCA912D95fE'
}
# Load the ABI from the provided JSON file
with open('./contracts/service_registry_abi.json', 'r') as abi_file:
contract_abi = json.load(abi_file)
# Create the contract instances
service_registries = {
chain_name: web3.eth.contract(address=contract_addresses[chain_name], abi=contract_abi)
for chain_name, web3 in web3_instances.items()
}
# Check if connections are successful
for chain_name, web3_instance in web3_instances.items():
if not web3_instance.is_connected():
raise Exception(f"Failed to connect to the {chain_name.capitalize()} network.")
else:
print(f"Successfully connected to the {chain_name.capitalize()} network.")
# Fetch LI.FI transfer history for a wallet; the integrator argument is currently unused in the request.
def get_transfers(integrator: str, wallet: str) -> dict:
    url = f"https://li.quest/v1/analytics/transfers?wallet={wallet}&fromTimestamp=1726165800"
    headers = {"accept": "application/json"}
    response = requests.get(url, headers=headers)
    response.raise_for_status()  # fail loudly on HTTP errors instead of parsing an error payload
    return response.json()
def fetch_and_aggregate_transactions():
aggregated_transactions = []
daily_agent_counts = {}
seen_agents = set()
for chain_name, service_registry in service_registries.items():
web3 = web3_instances[chain_name]
total_services = service_registry.functions.totalSupply().call()
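        # Service IDs are assigned sequentially from 1, so iterate up to totalSupply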
for service_id in range(1, total_services + 1):
service = service_registry.functions.getService(service_id).call()
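            # The last element of the service tuple is the list of agent IDs backing the service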
agent_ids = service[-1]
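            # Only services that include agent ID 40 or 25 are relevant to this dashboard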
if 40 in agent_ids or 25 in agent_ids:
agent_instance_data = service_registry.functions.getAgentInstances(service_id).call()
agent_addresses = agent_instance_data[1]
if agent_addresses:
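                    # Use the first registered agent instance address for this service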
agent_address = agent_addresses[0]
response_transfers = get_transfers("valory", agent_address)
transfers = response_transfers.get("transfers", [])
if isinstance(transfers, list):
aggregated_transactions.extend(transfers)
# Track the daily number of agents
current_date = ""
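                    # Look up the CreateService event to find the block, and hence the date, the service was registered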
creation_event = service_registry.events.CreateService.create_filter(from_block=0, argument_filters={'serviceId': service_id}).get_all_entries()
if creation_event:
block_number = creation_event[0]['blockNumber']
block = web3.eth.get_block(block_number)
creation_timestamp = datetime.fromtimestamp(block['timestamp'])
date_str = creation_timestamp.strftime('%Y-%m-%d')
current_date = date_str
# Ensure each agent is only counted once based on first registered date
if agent_address not in seen_agents:
seen_agents.add(agent_address)
if date_str not in daily_agent_counts:
daily_agent_counts[date_str] = set()
daily_agent_counts[date_str].add(agent_address)
daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
return aggregated_transactions, daily_agent_counts
# Function to parse the transaction data and prepare it for visualization
def process_transactions_and_agents(data):
transactions, daily_agent_counts = data
# Convert the data into a pandas DataFrame for easy manipulation
rows = []
for tx in transactions:
# Normalize amounts
sending_amount = float(tx["sending"]["amount"]) / (10 ** tx["sending"]["token"]["decimals"])
receiving_amount = float(tx["receiving"]["amount"]) / (10 ** tx["receiving"]["token"]["decimals"])
# Convert timestamps to datetime objects
sending_timestamp = datetime.utcfromtimestamp(tx["sending"]["timestamp"])
receiving_timestamp = datetime.utcfromtimestamp(tx["receiving"]["timestamp"])
# Prepare row data
rows.append({
"transactionId": tx["transactionId"],
"from_address": tx["fromAddress"],
"to_address": tx["toAddress"],
"sending_chain": tx["sending"]["chainId"],
"receiving_chain": tx["receiving"]["chainId"],
"sending_token_symbol": tx["sending"]["token"]["symbol"],
"receiving_token_symbol": tx["receiving"]["token"]["symbol"],
"sending_amount": sending_amount,
"receiving_amount": receiving_amount,
"sending_amount_usd": float(tx["sending"]["amountUSD"]),
"receiving_amount_usd": float(tx["receiving"]["amountUSD"]),
"sending_gas_used": int(tx["sending"]["gasUsed"]),
"receiving_gas_used": int(tx["receiving"]["gasUsed"]),
"sending_timestamp": sending_timestamp,
"receiving_timestamp": receiving_timestamp,
"date": sending_timestamp.date(), # Group by day
"week": sending_timestamp.strftime('%Y-%m-%d') # Group by week
})
df_transactions = pd.DataFrame(rows)
df_transactions = df_transactions.drop_duplicates()
df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
df_agents['date'] = pd.to_datetime(df_agents['date'])
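    # Bucket registration dates into calendar weeks and keep the week's start date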
df_agents['week'] = df_agents['date'].dt.to_period('W').apply(lambda r: r.start_time)
df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()
return df_transactions, df_agents, df_agents_weekly
# Function to create visualizations based on the metrics
def create_visualizations():
transactions_data = fetch_and_aggregate_transactions()
df_transactions, df_agents, df_agents_weekly = process_transactions_and_agents(transactions_data)
# Fetch daily value locked data
df_tvl = pd.read_csv('daily_value_locked.csv')
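    # The CSV is expected to provide date, chain_name, amount0_usd and amount1_usd columns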
# Calculate total value locked per chain per day
df_tvl["total_value_locked_usd"] = df_tvl["amount0_usd"] + df_tvl["amount1_usd"]
df_tvl_daily = df_tvl.groupby(["date", "chain_name"])["total_value_locked_usd"].sum().reset_index()
df_tvl_daily['date'] = pd.to_datetime(df_tvl_daily['date'])
# Filter out dates with zero total value locked
df_tvl_daily = df_tvl_daily[df_tvl_daily["total_value_locked_usd"] > 0]
chain_name_map = {
"mode": "Mode",
"base": "Base",
"ethereum": "Ethereum",
"optimism": "Optimism"
}
df_tvl_daily["chain_name"] = df_tvl_daily["chain_name"].map(chain_name_map)
# Plot total value locked
fig_tvl = px.bar(
df_tvl_daily,
x="date",
y="total_value_locked_usd",
color="chain_name",
opacity=0.7,
title="Total Volume Invested in Pools in Different Chains Daily",
labels={"date": "Date","chain_name": "Transaction Chain", "total_value_locked_usd": "Total Volume Invested (USD)"},
barmode='stack',
color_discrete_map={
"Mode": "orange",
"Base": "purple",
"Ethereum": "darkgreen",
"Optimism": "blue"
}
)
fig_tvl.update_layout(
xaxis_title="Date",
yaxis=dict(tickmode='linear', tick0=0, dtick=4),
xaxis=dict(
tickmode='array',
tickvals=df_tvl_daily['date'],
ticktext=df_tvl_daily['date'].dt.strftime('%b %d'),
tickangle=-45,
),
bargap=0.6, # Increase gap between bar groups (0-1)
bargroupgap=0.1, # Decrease gap between bars in a group (0-1)
height=600,
width=1200, # Specify width to prevent bars from being too wide
showlegend=True,
template='plotly_white'
)
fig_tvl.update_xaxes(tickformat="%b %d")
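    # Map numeric chain IDs from the LI.FI payload to display names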
chain_name_map = {
10: "Optimism",
8453: "Base",
1: "Ethereum",
34443: "Mode"
}
df_transactions["sending_chain"] = df_transactions["sending_chain"].map(chain_name_map)
df_transactions["receiving_chain"] = df_transactions["receiving_chain"].map(chain_name_map)
df_transactions["sending_chain"] = df_transactions["sending_chain"].astype(str)
df_transactions["receiving_chain"] = df_transactions["receiving_chain"].astype(str)
df_transactions['date'] = pd.to_datetime(df_transactions['date'])
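    # A transfer whose sending and receiving chains match is a swap; otherwise it is a bridge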
df_transactions["is_swap"] = df_transactions.apply(lambda x: x["sending_chain"] == x["receiving_chain"], axis=1)
swaps_per_chain = df_transactions[df_transactions["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
fig_swaps_chain = px.bar(
swaps_per_chain,
x="date",
y="swap_count",
color="sending_chain",
title="Chain Daily Activity: Swaps",
labels={"sending_chain": "Transaction Chain", "swap_count": "Daily Swap Nr"},
barmode="stack",
opacity=0.7,
color_discrete_map={
"Optimism": "blue",
"Ethereum": "darkgreen",
"Base": "purple",
"Mode": "orange"
}
)
fig_swaps_chain.update_layout(
xaxis_title="Date",
yaxis_title="Daily Swap Count",
yaxis=dict(tickmode='linear', tick0=0, dtick=1),
xaxis=dict(
tickmode='array',
tickvals=[d for d in swaps_per_chain['date']],
ticktext=[d.strftime('%m-%d') for d in swaps_per_chain['date']],
tickangle=-45,
),
bargap=0.6,
bargroupgap=0.1,
height=600,
width=1200,
margin=dict(l=50, r=50, t=50, b=50),
showlegend=True,
legend=dict(
yanchor="top",
y=0.99,
xanchor="right",
x=0.99
),
template='plotly_white'
)
fig_swaps_chain.update_xaxes(tickformat="%m-%d")
df_transactions["is_bridge"] = df_transactions.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)
bridges_per_chain = df_transactions[df_transactions["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
fig_bridges_chain = px.bar(
bridges_per_chain,
x="date",
y="bridge_count",
color="sending_chain",
title="Chain Daily Activity: Bridges",
labels={"sending_chain": "Transaction Chain", "bridge_count": "Daily Bridge Nr"},
barmode="stack",
opacity=0.7,
color_discrete_map={
"Optimism": "blue",
"Ethereum": "darkgreen",
"Base": "purple",
"Mode": "orange"
}
)
fig_bridges_chain.update_layout(
xaxis_title="Date",
yaxis_title="Daily Bridge Count",
yaxis=dict(tickmode='linear', tick0=0, dtick=1),
xaxis=dict(
tickmode='array',
tickvals=[d for d in bridges_per_chain['date']],
ticktext=[d.strftime('%m-%d') for d in bridges_per_chain['date']],
tickangle=-45,
),
bargap=0.6,
bargroupgap=0.1,
height=600,
width=1200,
margin=dict(l=50, r=50, t=50, b=50),
showlegend=True,
legend=dict(
yanchor="top",
y=0.99,
xanchor="right",
x=0.99
),
template='plotly_white'
)
fig_bridges_chain.update_xaxes(tickformat="%m-%d")
df_agents['date'] = pd.to_datetime(df_agents['date'])
daily_agents_df = df_agents.groupby('date').agg({'agent_count': 'sum'}).reset_index()
daily_agents_df.rename(columns={'agent_count': 'daily_agent_count'}, inplace=True)
# Sort by date to ensure proper running total calculation
daily_agents_df = daily_agents_df.sort_values('date')
# Create week column
daily_agents_df['week'] = daily_agents_df['date'].dt.to_period('W').apply(lambda r: r.start_time)
# Calculate running total within each week
daily_agents_df['running_weekly_total'] = daily_agents_df.groupby('week')['daily_agent_count'].cumsum()
# Create final merged dataframe
weekly_merged_df = daily_agents_df.copy()
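    # One-off correction: remove one agent from the 2024-11-15 daily count and its running weekly total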
adjustment_date = pd.to_datetime('2024-11-15')
weekly_merged_df.loc[weekly_merged_df['date'] == adjustment_date, 'daily_agent_count'] -= 1
weekly_merged_df.loc[weekly_merged_df['date'] == adjustment_date, 'running_weekly_total'] -= 1
fig_agents_registered = go.Figure(data=[
go.Bar(
name='Daily nr of Registered Agents',
x=weekly_merged_df['date'].dt.strftime("%b %d"),
y=weekly_merged_df['daily_agent_count'],
opacity=0.7,
marker_color='blue'
),
go.Bar(
name='Weekly Nr of Registered Agents',
x=weekly_merged_df['date'].dt.strftime("%b %d"),
y=weekly_merged_df['running_weekly_total'],
opacity=0.7,
marker_color='purple'
)
])
fig_agents_registered.update_layout(
xaxis_title='Date',
yaxis_title='Number of Agents',
title="Nr of Agents Registered",
barmode='group',
yaxis=dict(tickmode='linear', tick0=0, dtick=1),
xaxis=dict(
categoryorder='array',
categoryarray=weekly_merged_df['date'].dt.strftime("%b %d"),
tickangle=-45
),
bargap=0.3,
height=600,
width=1200,
showlegend=True,
legend=dict(
yanchor="top",
xanchor="right",
),
template='plotly_white',
)
    return fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_tvl
# Gradio interface
def dashboard():
with gr.Blocks() as demo:
gr.Markdown("# Valory Transactions Dashboard")
with gr.Tab("Chain Daily activity"):
fig_tx_chain = create_transcation_visualizations()
gr.Plot(fig_tx_chain)
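        # Build the remaining figures once; each tab below renders one precomputed plot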
        fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_tvl = create_visualizations()
with gr.Tab("Swaps Daily"):
gr.Plot(fig_swaps_chain)
with gr.Tab("Bridges Daily"):
gr.Plot(fig_bridges_chain)
with gr.Tab("Nr of Agents Registered"):
gr.Plot(fig_agents_registered)
with gr.Tab("DAA"):
fig_agents_with_transactions_daily = create_active_agents_visualizations()
gr.Plot(fig_agents_with_transactions_daily)
with gr.Tab("Total Value Locked"):
gr.Plot(fig_tvl)
return demo
# Launch the dashboard
if __name__ == "__main__":
dashboard().launch()