# gauravlochab
# chore: Add requirements.txt and restart scripts
# 322b74c
# raw
# history blame
# 20.9 kB
import requests
import pandas as pd
import gradio as gr
import plotly.graph_objects as go
import plotly.express as px
from datetime import datetime, timedelta
import json
from web3 import Web3
from app_trans_new import create_transcation_visualizations,create_active_agents_visualizations
from app_value_locked import fetch_daily_value_locked
import os
# RPC endpoint for the Optimism network, supplied via environment.
# No default is provided: a missing variable yields None and the
# connection check below fails at import time.
OPTIMISM_RPC_URL = os.getenv('OPTIMISM_RPC_URL')
# Initialize a Web3 instance
web3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))
# Check if connection is successful — fail fast so the dashboard never
# starts against a dead RPC endpoint.
if not web3.is_connected():
    raise Exception("Failed to connect to the Optimism network.")
# Contract address of the service registry on Optimism (which registry
# deployment this is — TODO confirm against the ABI file).
contract_address = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'
# Load the ABI from the provided JSON file
with open('./contracts/service_registry_abi.json', 'r') as abi_file:
    contract_abi = json.load(abi_file)
# Module-level contract handle used by the fetch functions below.
service_registry = web3.eth.contract(address=contract_address, abi=contract_abi)
def get_transfers(integrator: str, wallet: str) -> dict:
    """Fetch LI.FI transfer analytics for one integrator/wallet pair.

    :param integrator: Integrator name registered with LI.FI (e.g. "valory").
    :param wallet: Wallet address whose transfers are queried.
    :return: Decoded JSON response body (expected to contain a "transfers" list).
    """
    # Fix: original annotated the return as ``str`` although ``response.json()``
    # yields a parsed dict.
    url = f"https://li.quest/v1/analytics/transfers?integrator={integrator}&wallet={wallet}"
    headers = {"accept": "application/json"}
    # Timeout added so a stalled LI.FI endpoint cannot hang the dashboard build.
    response = requests.get(url, headers=headers, timeout=30)
    return response.json()
def load_activity_checker_contract(w3, staking_token_address):
    """
    Loads the Staking Token and Activity Checker contracts.

    :param w3: Web3 instance
    :param staking_token_address: Address of the staking token contract
    :return: Tuple of (Staking Token contract instance, Activity Checker contract instance)
    """
    try:
        # Read the Staking Token ABI from disk and instantiate its contract.
        with open('./contracts/StakingToken.json', "r", encoding="utf-8") as file:
            token_abi = json.load(file).get("abi", [])
        token_contract = w3.eth.contract(address=staking_token_address, abi=token_abi)

        # The staking token exposes the on-chain address of its activity checker.
        checker_address = token_contract.functions.activityChecker().call()

        # Read the Activity Checker ABI and instantiate that contract as well.
        with open('./contracts/StakingActivityChecker.json', "r", encoding="utf-8") as file:
            checker_abi = json.load(file).get("abi", [])
        checker_contract = w3.eth.contract(address=checker_address, abi=checker_abi)

        return token_contract, checker_contract
    except Exception as e:
        print(f"An error occurred while loading the contracts: {e}")
        raise
def fetch_and_aggregate_transactions():
    """Scan every registered service and collect its transfer activity.

    :return: tuple of
        - aggregated_transactions: flat list of LI.FI transfer records for
          all matching agents,
        - daily_agent_counts: {YYYY-MM-DD: count of distinct agents created
          that day},
        - daily_agents_with_transactions: {YYYY-MM-DD: count of those agents
          whose multisig shows at least one nonce}.
    """
    total_services = service_registry.functions.totalSupply().call()
    aggregated_transactions = []
    # Both dicts hold sets of agent addresses while scanning; converted to
    # plain counts just before returning.
    daily_agent_counts = {}
    daily_agents_with_transactions = {}
    # Staking contract address is hard-coded to one deployment; only the
    # activity checker half of the pair is used in this function.
    _staking_token_contract, activity_checker_contract = load_activity_checker_contract(web3, '0x88996bbdE7f982D93214881756840cE2c77C4992')
    # Service IDs are 1-based and contiguous up to totalSupply.
    for service_id in range(1, total_services + 1):
        service = service_registry.functions.getService(service_id).call()
        # Extract the list of agent IDs from the service data
        agent_ids = service[-1]  # Assuming the last element is the list of agent IDs
        # Check if 25 is in the list of agent IDs
        # (25 is presumably the agent type this dashboard tracks — TODO confirm)
        if 25 in agent_ids:
            # First instance address of the service's agent instances.
            agent_address = service_registry.functions.getAgentInstances(service_id).call()[1][0]
            response_transfers = get_transfers("valory", agent_address)
            transfers = response_transfers.get("transfers", [])
            if isinstance(transfers, list):
                aggregated_transactions.extend(transfers)
            # Track the daily number of agents: locate the block that created
            # this service via its CreateService event.
            creation_event = service_registry.events.CreateService.create_filter(
                from_block=0, argument_filters={'serviceId': service_id, 'configHash': service[2]}
            ).get_all_entries()
            if creation_event:
                block_number = creation_event[0]['blockNumber']
                block = web3.eth.get_block(block_number)
                # NOTE(review): fromtimestamp uses the local timezone, so the
                # day bucket depends on the server's TZ — confirm UTC intended.
                creation_timestamp = datetime.fromtimestamp(block['timestamp'])
                date_str = creation_timestamp.strftime('%Y-%m-%d')
                print("date_str",date_str)
                if date_str not in daily_agent_counts:
                    daily_agent_counts[date_str] = set()
                if date_str not in daily_agents_with_transactions:
                    daily_agents_with_transactions[date_str] = set()
                # service[1] presumably holds the service's multisig (Safe)
                # address — confirm against the registry ABI.
                service_safe = service[1]
                print("agent_address",agent_address,"service_safe",service_safe)
                # First element of getMultisigNonces is the nonce count;
                # > 0 means the safe has executed at least one transaction.
                multisig_nonces = activity_checker_contract.functions.getMultisigNonces(service_safe).call()[0]
                if multisig_nonces > 0:
                    daily_agents_with_transactions[date_str].add(agent_address)
                daily_agent_counts[date_str].add(agent_address)
    # Convert set to count
    daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
    daily_agents_with_transactions = {date: len(agents) for date, agents in daily_agents_with_transactions.items()}
    return aggregated_transactions, daily_agent_counts, daily_agents_with_transactions
# Function to parse the transaction data and prepare it for visualization
def process_transactions_and_agents(data):
    """Turn raw transfer records and daily agent counts into tidy DataFrames.

    :param data: tuple (transactions, daily_agent_counts,
        daily_agents_with_transactions) as produced by
        fetch_and_aggregate_transactions().
    :return: (df_transactions, df_agents_weekly,
        df_agents_with_transactions_weekly, df_agents_with_transactions)
    """
    transactions, daily_agent_counts, daily_agents_with_transactions = data

    def _flatten(tx):
        # Flatten one LI.FI transfer record into a single flat dict.
        send, recv = tx["sending"], tx["receiving"]
        sent_at = datetime.utcfromtimestamp(send["timestamp"])
        received_at = datetime.utcfromtimestamp(recv["timestamp"])
        return {
            "transactionId": tx["transactionId"],
            "from_address": tx["fromAddress"],
            "to_address": tx["toAddress"],
            "sending_chain": send["chainId"],
            "receiving_chain": recv["chainId"],
            "sending_token_symbol": send["token"]["symbol"],
            "receiving_token_symbol": recv["token"]["symbol"],
            # Normalize raw integer token amounts by each token's decimals.
            "sending_amount": float(send["amount"]) / (10 ** send["token"]["decimals"]),
            "receiving_amount": float(recv["amount"]) / (10 ** recv["token"]["decimals"]),
            "sending_amount_usd": float(send["amountUSD"]),
            "receiving_amount_usd": float(recv["amountUSD"]),
            "sending_gas_used": int(send["gasUsed"]),
            "receiving_gas_used": int(recv["gasUsed"]),
            "sending_timestamp": sent_at,
            "receiving_timestamp": received_at,
            "date": sent_at.date(),  # daily grouping key
            # NOTE(review): named "week" but holds the day string, same as the
            # original implementation.
            "week": sent_at.strftime('%Y-%m-%d'),
        }

    df_transactions = pd.DataFrame([_flatten(tx) for tx in transactions])

    df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
    df_agents_with_transactions = pd.DataFrame(
        list(daily_agents_with_transactions.items()),
        columns=['date', 'agent_count_with_transactions'],
    )

    # Parse the date strings and derive the start of each weekly period.
    for frame in (df_agents, df_agents_with_transactions):
        frame['date'] = pd.to_datetime(frame['date'])
        frame['week'] = frame['date'].dt.to_period('W').apply(lambda r: r.start_time)

    # Collapse the daily counts into weekly totals.
    df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()
    df_agents_with_transactions_weekly = (
        df_agents_with_transactions[['week', 'agent_count_with_transactions']]
        .groupby('week')
        .sum()
        .reset_index()
    )

    return df_transactions, df_agents_weekly, df_agents_with_transactions_weekly, df_agents_with_transactions
# Function to create visualizations based on the metrics
def create_visualizations():
    """Fetch all metric data (expensive) and build the dashboard's figures.

    :return: tuple (fig_swaps_chain, fig_bridges_chain, fig_agents_registered,
        fig_agents_with_transactions_daily, fig_tvl)
    """
    transactions_data = fetch_and_aggregate_transactions()
    df_transactions, df_agents_weekly, df_agents_with_transactions_weekly, df_agents_with_transactions = process_transactions_and_agents(transactions_data)
    # Fetch daily value locked data
    df_tvl = fetch_daily_value_locked()
    # Calculate total value locked per chain per day (sum of both pool legs).
    df_tvl["total_value_locked_usd"] = df_tvl["amount0_usd"] + df_tvl["amount1_usd"]
    df_tvl_daily = df_tvl.groupby(["date", "chain_name"])["total_value_locked_usd"].sum().reset_index()
    df_tvl_daily['date'] = pd.to_datetime(df_tvl_daily['date'])
    # Filter out dates with zero total value locked
    df_tvl_daily = df_tvl_daily[df_tvl_daily["total_value_locked_usd"] > 0]
    # Map lowercase chain names from the TVL source to display names.
    chain_name_map = {
        "optimism": "Optimism",
        "base": "Base",
        "ethereum": "Ethereum"
    }
    df_tvl_daily["chain_name"] = df_tvl_daily["chain_name"].map(chain_name_map)
    # Plot total value locked
    fig_tvl = px.bar(
        df_tvl_daily,
        x="date",
        y="total_value_locked_usd",
        color="chain_name",
        opacity=0.7,
        title="Total Volume Invested in Pools in Different Chains Daily",
        labels={"date": "Date","chain_name": "Transaction Chain", "total_value_locked_usd": "Total Volume Invested (USD)"},
        barmode='stack',
        color_discrete_map={
            "Optimism": "blue",
            "Base": "purple",
            "Ethereum": "darkgreen"
        }
    )
    fig_tvl.update_layout(
        xaxis_title=None,
        # NOTE(review): dtick=1 places a tick at every whole unit; on a USD
        # axis this can generate a huge number of ticks — confirm intended.
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=df_tvl_daily['date'],
            ticktext=df_tvl_daily['date'].dt.strftime('%b %d'),
            tickangle=-45,
        ),
        bargap=0.6,  # Increase gap between bar groups (0-1)
        bargroupgap=0.1,  # Decrease gap between bars in a group (0-1)
        height=600,  # Fixed height so bars are not too wide
        margin=dict(l=50, r=50, t=50, b=50),  # Add margins
        showlegend=True,
        template='plotly_white'
    )
    fig_tvl.update_xaxes(tickformat="%b %d")
    # Map numeric chain IDs from the transfer data to display names.
    chain_name_map = {
        10: "Optimism",
        8453: "Base",
        1: "Ethereum"
    }
    df_transactions["sending_chain"] = df_transactions["sending_chain"].map(chain_name_map)
    df_transactions["receiving_chain"] = df_transactions["receiving_chain"].map(chain_name_map)
    # Ensure that chain IDs are strings for consistent grouping
    df_transactions["sending_chain"] = df_transactions["sending_chain"].astype(str)
    df_transactions["receiving_chain"] = df_transactions["receiving_chain"].astype(str)
    df_transactions['date'] = pd.to_datetime(df_transactions['date'])
    # Identify swap transactions: token symbol changes between the two legs.
    df_transactions["is_swap"] = df_transactions.apply(lambda x: x["sending_token_symbol"] != x["receiving_token_symbol"], axis=1)
    # Total swaps per chain per day
    swaps_per_chain = df_transactions[df_transactions["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
    fig_swaps_chain = px.bar(
        swaps_per_chain,
        x="date",
        y="swap_count",
        color="sending_chain",
        title="Chain Daily Activity: Swaps",
        labels={"sending_chain": "Transaction Chain", "swap_count": "Daily Swap Nr"},
        barmode="stack",
        opacity=0.7,
        color_discrete_map={
            "Optimism": "blue",
            "Ethereum": "darkgreen",
            "Base": "purple"
        }
    )
    fig_swaps_chain.update_layout(
        xaxis_title="Date",
        yaxis_title="Daily Swap Count",
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=[d for d in swaps_per_chain['date'] if d.weekday() == 0],  # Show only Mondays
            ticktext=[d.strftime('%m-%d') for d in swaps_per_chain['date'] if d.weekday() == 0],
            tickangle=-45,
        ),
        bargap=0.6,  # Increase gap between bar groups (0-1)
        bargroupgap=0.1,  # Decrease gap between bars in a group (0-1)
        height=600,  # Fixed height so bars are not too wide
        margin=dict(l=50, r=50, t=50, b=50),  # Add margins
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        ),
        template='plotly_white'
    )
    fig_swaps_chain.update_xaxes(tickformat="%m-%d")
    # Identify bridge transactions: sending and receiving chains differ.
    df_transactions["is_bridge"] = df_transactions.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)
    # Total bridges per chain per day
    bridges_per_chain = df_transactions[df_transactions["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
    fig_bridges_chain = px.bar(
        bridges_per_chain,
        x="date",
        y="bridge_count",
        color="sending_chain",
        title="Chain Daily Activity: Bridges",
        labels={"sending_chain": "Transaction Chain", "bridge_count": "Daily Bridge Nr"},
        barmode="stack",
        opacity=0.7,
        color_discrete_map={
            "Optimism": "blue",
            "Ethereum": "darkgreen",
            "Base": "purple"
        }
    )
    fig_bridges_chain.update_layout(
        xaxis_title="Date",
        yaxis_title="Daily Bridge Count",
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=[d for d in bridges_per_chain['date'] if d.weekday() == 0],  # Show only Mondays
            ticktext=[d.strftime('%m-%d') for d in bridges_per_chain['date'] if d.weekday() == 0],
            tickangle=-45,
        ),
        bargap=0.6,  # Increase gap between bar groups (0-1)
        bargroupgap=0.1,  # Decrease gap between bars in a group (0-1)
        height=600,  # Fixed height so bars are not too wide
        margin=dict(l=50, r=50, t=50, b=50),  # Add margins
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        ),
        template='plotly_white'
    )
    fig_bridges_chain.update_xaxes(tickformat="%m-%d")
    # Nr of agents registered daily and weekly
    # Convert 'date' column to datetime
    df_agents_with_transactions['date'] = pd.to_datetime(df_agents_with_transactions['date'])
    # Calculate daily number of agents registered
    daily_agents_df = df_agents_with_transactions.groupby('date').size().reset_index(name='daily_agent_count')
    # NOTE(review): hard-coded override for 2024-10-02 — presumably correcting
    # a known data glitch for that day; confirm before removing.
    daily_agents_df.loc[daily_agents_df['date'] == '2024-10-02', 'daily_agent_count'] = 2
    # Calculate cumulative number of agents registered within the week up to each day
    df_agents_with_transactions['week_start'] = df_agents_with_transactions['date'].dt.to_period("W").apply(lambda r: r.start_time)
    cumulative_agents_df = df_agents_with_transactions.groupby(['week_start', 'date']).size().groupby(level=0).cumsum().reset_index(name='weekly_agent_count')
    # Same hard-coded 2024-10-02 correction as above.
    cumulative_agents_df.loc[cumulative_agents_df['date'] == '2024-10-02', 'weekly_agent_count'] = 2
    # Combine the data to ensure both dataframes align for plotting
    combined_df = pd.merge(daily_agents_df, cumulative_agents_df, on='date', how='left')
    # Create the bar chart with side-by-side bars
    fig_agents_registered = go.Figure(data=[
        go.Bar(
            name='Daily nr of Registered Agents',
            x=combined_df['date'],
            y=combined_df['daily_agent_count'],
            opacity=0.7,
            marker_color='blue'
        ),
        go.Bar(
            name='Total Weekly Nr of Registered Agents',
            x=combined_df['date'],
            y=combined_df['weekly_agent_count'],
            opacity=0.7,
            marker_color='purple'
        )
    ])
    # Update layout to group bars side by side for each day
    fig_agents_registered.update_layout(
        xaxis_title='Date',
        yaxis_title='Number of Agents',
        title="Nr of Agents Registered",
        barmode='group',
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=combined_df['date'],
            ticktext=[d.strftime("%b %d") for d in combined_df['date']],
            tickangle=-45
        ),
        bargap=0.6,  # Increase gap between bar groups (0-1)
        height=600,  # Fixed height so bars are not too wide
        margin=dict(l=50, r=50, t=50, b=50),  # Add margins
        showlegend=True,
        template='plotly_white'
    )
    # Calculate weekly average daily active agents
    df_agents_with_transactions['day_of_week'] = df_agents_with_transactions['date'].dt.dayofweek
    df_agents_with_transactions_weekly_avg = df_agents_with_transactions.groupby(['week', 'day_of_week'])['agent_count_with_transactions'].mean().reset_index()
    df_agents_with_transactions_weekly_avg = df_agents_with_transactions_weekly_avg.groupby('week')['agent_count_with_transactions'].mean().reset_index()
    # Number of agents with transactions per week.
    # NOTE(review): the bars use df_agents_with_transactions_weekly while the
    # tick labels below come from the *_weekly_avg frame — the two must share
    # the same 'week' values for labels to line up; confirm.
    fig_agents_with_transactions_daily = px.bar(
        df_agents_with_transactions_weekly,
        x="week",
        opacity=0.7,
        y="agent_count_with_transactions",
        title="Daily Active Agents: Weekly Average Nr of agents with at least 1 transaction daily",
        labels={"week": "Week of", "agent_count_with_transactions": "Number of Agents with Transactions"},
        color_discrete_sequence=["darkgreen"]
    )
    fig_agents_with_transactions_daily.update_layout(
        title=dict(
            x=0.5,y=0.95,xanchor='center',yanchor='top'),  # Center the title and adjust vertical position
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=df_agents_with_transactions_weekly_avg['week'],
            ticktext=df_agents_with_transactions_weekly_avg['week'].dt.strftime('%b %d'),
            tickangle=0
        ),
        bargap=0.6,  # Increase gap between bar groups (0-1)
        bargroupgap=0.1,  # Decrease gap between bars in a group (0-1)
        height=600,  # Fixed height so bars are not too wide
        margin=dict(l=50, r=50, t=50, b=50),  # Add margins
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        )
    )
    return fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_agents_with_transactions_daily,fig_tvl
# Gradio interface
def dashboard():
    """Assemble the Gradio Blocks UI: one tab per metric figure.

    :return: the gr.Blocks demo object (caller is expected to .launch() it).
    """
    with gr.Blocks() as demo:
        gr.Markdown("# Valory Transactions Dashboard")
        with gr.Tab("Chain Daily activity"):
            fig_tx_chain = create_transcation_visualizations()
            gr.Plot(fig_tx_chain)
        # Build the remaining figures with a single expensive data fetch,
        # then hand each figure to its own tab.
        fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_agents_with_transactions_daily,fig_tvl = create_visualizations()
        with gr.Tab("Swaps Daily"):
            gr.Plot(fig_swaps_chain)
        with gr.Tab("Bridges Daily"):
            gr.Plot(fig_bridges_chain)
        with gr.Tab("Nr of Agents Registered"):
            gr.Plot(fig_agents_registered)
        with gr.Tab("DAA"):
            # Daily Active Agents figure comes from the dedicated helper and
            # shadows the one returned by create_visualizations() above.
            fig_agents_with_transactions_daily = create_active_agents_visualizations()
            gr.Plot(fig_agents_with_transactions_daily)
        with gr.Tab("Total Value Locked"):
            gr.Plot(fig_tvl)
    return demo
# Launch the dashboard
if __name__ == "__main__":
    # Build the Blocks app and start the Gradio server (blocks until exit).
    dashboard().launch()