import requests
import pandas as pd
import gradio as gr
import plotly.express as px
from datetime import datetime, timedelta
import json
from web3 import Web3

from app_trans_new import create_transcation_visualizations
from app_value_locked import fetch_daily_value_locked

OPTIMISM_RPC_URL = 'https://opt-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
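# The Alchemy key above is hardcoded; for a public Space, reading the endpoint
# from the environment is the safer pattern (illustrative sketch, assuming an
# OPTIMISM_RPC_URL secret has been configured for this Space):
#
#   import os
#   OPTIMISM_RPC_URL = os.environ.get("OPTIMISM_RPC_URL", OPTIMISM_RPC_URL)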
# Initialize a Web3 instance
web3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))

# Check that the connection is successful
if not web3.is_connected():
    raise Exception("Failed to connect to the Optimism network.")

# Service Registry contract address
contract_address = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'

# Load the ABI from the provided JSON file
with open('./contracts/service_registry_abi.json', 'r') as abi_file:
    contract_abi = json.load(abi_file)

# Create the Service Registry contract instance
service_registry = web3.eth.contract(address=contract_address, abi=contract_abi)
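# Optional sanity check, kept commented out so importing the module stays
# side-effect free: the Service Registry exposes totalSupply(), which
# fetch_and_aggregate_transactions() below relies on.
#
#   num_services = service_registry.functions.totalSupply().call()
#   print(f"Service Registry reports {num_services} services")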
def get_transfers(integrator: str, wallet: str) -> dict:
    """Fetch LI.FI transfer analytics for the given integrator and wallet."""
    url = f"https://li.quest/v1/analytics/transfers?integrator={integrator}&wallet={wallet}"
    headers = {"accept": "application/json"}
    response = requests.get(url, headers=headers)
    return response.json()
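# Shape of the LI.FI transfers payload as consumed by
# process_transactions_and_agents() below (illustrative sketch of only the
# fields this app reads, not the full API schema):
#
#   {
#       "transfers": [
#           {
#               "transactionId": "0x...",
#               "fromAddress": "0x...",
#               "toAddress": "0x...",
#               "sending": {
#                   "amount": "1000000", "amountUSD": "1.00", "gasUsed": "21000",
#                   "chainId": 10, "timestamp": 1700000000,
#                   "token": {"symbol": "USDC", "decimals": 6},
#               },
#               "receiving": {...},  # same fields as "sending"
#           },
#       ],
#   }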
def load_activity_checker_contract(w3, staking_token_address):
    """
    Loads the Staking Token and Activity Checker contracts.

    :param w3: Web3 instance
    :param staking_token_address: Address of the staking token contract
    :return: Tuple of (Staking Token contract instance, Activity Checker contract instance)
    """
    try:
        # Load the ABI file for the Staking Token contract
        with open('./contracts/StakingToken.json', "r", encoding="utf-8") as file:
            staking_token_data = json.load(file)
        staking_token_abi = staking_token_data.get("abi", [])

        # Create the Staking Token contract instance
        staking_token_contract = w3.eth.contract(address=staking_token_address, abi=staking_token_abi)

        # Get the Activity Checker contract address from the Staking Token contract
        activity_checker_address = staking_token_contract.functions.activityChecker().call()

        # Load the ABI file for the Activity Checker contract
        with open('./contracts/StakingActivityChecker.json', "r", encoding="utf-8") as file:
            activity_checker_data = json.load(file)
        activity_checker_abi = activity_checker_data.get("abi", [])

        # Create the Activity Checker contract instance
        activity_checker_contract = w3.eth.contract(address=activity_checker_address, abi=activity_checker_abi)

        return staking_token_contract, activity_checker_contract
    except Exception as e:
        print(f"An error occurred while loading the contracts: {e}")
        raise
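# Example usage (mirrors the call made in fetch_and_aggregate_transactions()
# below; `service_safe` is a placeholder for a service multisig address):
#
#   staking_token, activity_checker = load_activity_checker_contract(
#       web3, '0x88996bbdE7f982D93214881756840cE2c77C4992'
#   )
#   nonces = activity_checker.functions.getMultisigNonces(service_safe).call()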
def fetch_and_aggregate_transactions():
    total_services = service_registry.functions.totalSupply().call()
    aggregated_transactions = []
    daily_agent_counts = {}
    daily_agents_with_transactions = {}
    _staking_token_contract, activity_checker_contract = load_activity_checker_contract(web3, '0x88996bbdE7f982D93214881756840cE2c77C4992')

    for service_id in range(1, total_services + 1):
        service = service_registry.functions.getService(service_id).call()
        # Extract the list of agent IDs from the service data
        agent_ids = service[-1]  # Assuming the last element is the list of agent IDs
        # Only consider services that include agent ID 25
        if 25 in agent_ids:
            agent_address = service_registry.functions.getAgentInstances(service_id).call()[1][0]
            response_transfers = get_transfers("valory", agent_address)
            transfers = response_transfers.get("transfers", [])
            if isinstance(transfers, list):
                aggregated_transactions.extend(transfers)

            # Track the daily number of agents via the service creation event
            creation_event = service_registry.events.CreateService.create_filter(
                from_block=0, argument_filters={'serviceId': service_id, 'configHash': service[2]}
            ).get_all_entries()
            if creation_event:
                block_number = creation_event[0]['blockNumber']
                block = web3.eth.get_block(block_number)
                creation_timestamp = datetime.fromtimestamp(block['timestamp'])
                date_str = creation_timestamp.strftime('%Y-%m-%d')
                print("date_str", date_str)
                if date_str not in daily_agent_counts:
                    daily_agent_counts[date_str] = set()
                if date_str not in daily_agents_with_transactions:
                    daily_agents_with_transactions[date_str] = set()

                service_safe = service[1]
                print("agent_address", agent_address, "service_safe", service_safe)
                multisig_nonces = activity_checker_contract.functions.getMultisigNonces(service_safe).call()[0]
                if multisig_nonces > 0:
                    daily_agents_with_transactions[date_str].add(agent_address)
                daily_agent_counts[date_str].add(agent_address)

    # Convert the address sets into per-day counts
    daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
    daily_agents_with_transactions = {date: len(agents) for date, agents in daily_agents_with_transactions.items()}
    return aggregated_transactions, daily_agent_counts, daily_agents_with_transactions
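# fetch_and_aggregate_transactions() returns a 3-tuple:
#   aggregated_transactions        -> list of raw LI.FI transfer dicts
#   daily_agent_counts             -> {'YYYY-MM-DD': number of agents whose service was created that day}
#   daily_agents_with_transactions -> {'YYYY-MM-DD': number of those agents with multisig nonces > 0}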
# Parse the transaction data and prepare it for visualization
def process_transactions_and_agents(data):
    transactions, daily_agent_counts, daily_agents_with_transactions = data

    # Convert the data into pandas DataFrames for easy manipulation
    rows = []
    for tx in transactions:
        # Normalize amounts by the token decimals
        sending_amount = float(tx["sending"]["amount"]) / (10 ** tx["sending"]["token"]["decimals"])
        receiving_amount = float(tx["receiving"]["amount"]) / (10 ** tx["receiving"]["token"]["decimals"])

        # Convert timestamps to datetime objects
        sending_timestamp = datetime.utcfromtimestamp(tx["sending"]["timestamp"])
        receiving_timestamp = datetime.utcfromtimestamp(tx["receiving"]["timestamp"])

        # Prepare row data
        rows.append({
            "transactionId": tx["transactionId"],
            "from_address": tx["fromAddress"],
            "to_address": tx["toAddress"],
            "sending_chain": tx["sending"]["chainId"],
            "receiving_chain": tx["receiving"]["chainId"],
            "sending_token_symbol": tx["sending"]["token"]["symbol"],
            "receiving_token_symbol": tx["receiving"]["token"]["symbol"],
            "sending_amount": sending_amount,
            "receiving_amount": receiving_amount,
            "sending_amount_usd": float(tx["sending"]["amountUSD"]),
            "receiving_amount_usd": float(tx["receiving"]["amountUSD"]),
            "sending_gas_used": int(tx["sending"]["gasUsed"]),
            "receiving_gas_used": int(tx["receiving"]["gasUsed"]),
            "sending_timestamp": sending_timestamp,
            "receiving_timestamp": receiving_timestamp,
            "date": sending_timestamp.date(),  # Used to group by day
            "week": sending_timestamp.strftime('%Y-%m-%d')  # Date string kept for weekly grouping
        })

    df_transactions = pd.DataFrame(rows)
    df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
    df_agents_with_transactions = pd.DataFrame(list(daily_agents_with_transactions.items()), columns=['date', 'agent_count_with_transactions'])

    # Convert the date columns to datetime
    df_agents['date'] = pd.to_datetime(df_agents['date'])
    df_agents_with_transactions['date'] = pd.to_datetime(df_agents_with_transactions['date'])

    # Convert dates to week periods (anchored on the week's start)
    df_agents['week'] = df_agents['date'].dt.to_period('W').apply(lambda r: r.start_time)
    df_agents_with_transactions['week'] = df_agents_with_transactions['date'].dt.to_period('W').apply(lambda r: r.start_time)

    # Aggregate by week
    df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()
    df_agents_with_transactions_weekly = df_agents_with_transactions[['week', 'agent_count_with_transactions']].groupby('week').sum().reset_index()

    return df_transactions, df_agents_weekly, df_agents_with_transactions_weekly
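# Weekly bucketing note: to_period('W') maps each date to its calendar week
# (pandas' default 'W-SUN' anchoring, i.e. Monday through Sunday) and
# .start_time pins the bucket to the week's first day. Illustrative example:
#
#   dates = pd.to_datetime(['2024-09-03', '2024-09-04', '2024-09-10'])
#   weeks = dates.to_period('W').start_time
#   # -> 2024-09-02, 2024-09-02, 2024-09-09 (the first two fall in the same week)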
# Create visualizations based on the metrics
def create_visualizations():
    transactions_data = fetch_and_aggregate_transactions()
    df_transactions, df_agents_weekly, df_agents_with_transactions_weekly = process_transactions_and_agents(transactions_data)

    # Fetch daily value locked data
    df_tvl = fetch_daily_value_locked()

    # Calculate total value locked per chain per day
    df_tvl["total_value_locked_usd"] = df_tvl["amount0_usd"] + df_tvl["amount1_usd"]
    df_tvl_daily = df_tvl.groupby(["date", "chain_name"])["total_value_locked_usd"].sum().reset_index()
    df_tvl_daily['date'] = pd.to_datetime(df_tvl_daily['date'])

    # Plot total value locked
    fig_tvl = px.bar(
        df_tvl_daily,
        x="date",
        y="total_value_locked_usd",
        color="chain_name",
        title="Total Volume Invested in Pools in Different Chains Daily",
        labels={"date": "Date", "total_value_locked_usd": "Total Value Locked (USD)"},
        barmode='stack',
        color_discrete_map={
            "optimism": "blue",
            "base": "purple",
            "ethereum": "darkgreen"
        }
    )
    fig_tvl.update_layout(
        xaxis_title=None,
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=df_tvl_daily['date'],
            ticktext=df_tvl_daily['date'].dt.strftime('%Y-%m-%d'),
            tickangle=0,
        ),
        bargap=0.8,
        height=700,
    )
    fig_tvl.update_xaxes(tickformat="%Y-%m-%d")
    # Map chain IDs to chain names
    chain_name_map = {
        10: "Optimism",
        8453: "Base",
        1: "Ethereum"
    }
    df_transactions["sending_chain"] = df_transactions["sending_chain"].map(chain_name_map)
    df_transactions["receiving_chain"] = df_transactions["receiving_chain"].map(chain_name_map)

    # Ensure the chain names are strings for consistent grouping
    df_transactions["sending_chain"] = df_transactions["sending_chain"].astype(str)
    df_transactions["receiving_chain"] = df_transactions["receiving_chain"].astype(str)
    df_transactions['date'] = pd.to_datetime(df_transactions['date'])

    # Identify swap transactions (sending and receiving tokens differ)
    df_transactions["is_swap"] = df_transactions.apply(lambda x: x["sending_token_symbol"] != x["receiving_token_symbol"], axis=1)

    # Total swaps per chain per day
    swaps_per_chain = df_transactions[df_transactions["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
    fig_swaps_chain = px.bar(
        swaps_per_chain,
        x="date",
        y="swap_count",
        color="sending_chain",
        title="Chain Daily Activity: Swaps",
        labels={"sending_chain": "Transaction Chain", "swap_count": "Daily Swap Count"},
        barmode="stack",
        color_discrete_map={
            "Optimism": "blue",
            "Ethereum": "darkgreen",
            "Base": "purple"
        }
    )
    fig_swaps_chain.update_layout(
        xaxis_title=None,
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=swaps_per_chain['date'],
            ticktext=swaps_per_chain['date'].dt.strftime('%Y-%m-%d'),
            tickangle=0,
        ),
        bargap=0.8,
        height=700,
    )
    fig_swaps_chain.update_xaxes(tickformat="%Y-%m-%d")

    # Identify bridge transactions (sending and receiving chains differ)
    df_transactions["is_bridge"] = df_transactions.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)

    # Total bridges per chain per day
    bridges_per_chain = df_transactions[df_transactions["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
    fig_bridges_chain = px.bar(
        bridges_per_chain,
        x="date",
        y="bridge_count",
        color="sending_chain",
        title="Chain Daily Activity: Bridges",
        labels={"sending_chain": "Transaction Chain", "bridge_count": "Daily Bridge Count"},
        barmode="stack",
        color_discrete_map={
            "Optimism": "blue",
            "Ethereum": "darkgreen",
            "Base": "purple"
        }
    )
    fig_bridges_chain.update_layout(
        xaxis_title=None,
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=bridges_per_chain['date'],
            ticktext=bridges_per_chain['date'].dt.strftime('%Y-%m-%d'),
            tickangle=0,
        ),
        bargap=0.8,
        height=700,
    )
    fig_bridges_chain.update_xaxes(tickformat="%Y-%m-%d")
    # Number of agents registered per week
    fig_agents_daily = px.bar(
        df_agents_weekly,
        x="week",
        y="agent_count",
        title="Number of Agents Registered WoW",
        labels={"week": "Week", "agent_count": "Number of Agents"},
        color_discrete_sequence=["purple"]
    )
    fig_agents_daily.update_layout(
        xaxis_title=None,
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=df_agents_weekly['week'],
            ticktext=df_agents_weekly['week'].dt.strftime('%Y-%m-%d'),
            tickangle=0
        ),
        bargap=0.8,
        height=700,
    )

    # Number of agents with at least one transaction per week
    fig_agents_with_transactions_daily = px.bar(
        df_agents_with_transactions_weekly,
        x="week",
        y="agent_count_with_transactions",
        title="Agents With at Least 1 Transaction WoW",
        labels={"week": "Week", "agent_count_with_transactions": "Number of Agents with Transactions"},
        color_discrete_sequence=["darkgreen"]
    )
    fig_agents_with_transactions_daily.update_layout(
        xaxis_title=None,
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=df_agents_with_transactions_weekly['week'],
            ticktext=df_agents_with_transactions_weekly['week'].dt.strftime('%Y-%m-%d'),
            tickangle=0
        ),
        bargap=0.8,
        height=700,
    )

    return fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily, fig_tvl
# Gradio interface
def dashboard():
    with gr.Blocks() as demo:
        gr.Markdown("# Valory Transactions Dashboard")

        with gr.Tab("Transactions"):
            fig_tx_chain = create_transcation_visualizations()
            gr.Plot(fig_tx_chain)

        # Fetch the remaining visualizations once and display them in the tabs below
        fig_swaps_chain, fig_bridges_chain, fig_agents_daily, fig_agents_with_transactions_daily, fig_tvl = create_visualizations()
with gr.Tab("Swaps"): | |
gr.Plot(fig_swaps_chain) | |
with gr.Tab("Bridges"): | |
            gr.Plot(fig_bridges_chain)

        with gr.Tab("Agents Week Over Week"):
            gr.Plot(fig_agents_daily)

        with gr.Tab("Agents with Transactions Week Over Week"):
            gr.Plot(fig_agents_with_transactions_daily)

        with gr.Tab("Total Value Locked"):
            gr.Plot(fig_tvl)

    return demo
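# Note: launch() also accepts explicit server options when the defaults are not
# suitable (illustrative sketch; the host/port values are assumptions, not taken
# from this Space's configuration):
#
#   dashboard().launch(server_name="0.0.0.0", server_port=7860)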
# Launch the dashboard
if __name__ == "__main__":
    dashboard().launch()