optimus-metrics / app.py
gauravlochab
Add agent/service metrics
fd97ba6
raw
history blame
7.12 kB
from datetime import datetime, timedelta, timezone

import gradio as gr
import pandas as pd
import plotly.express as px
import requests
from web3 import Web3
from web3.middleware import geth_poa_middleware
# Optimism RPC endpoint (you can use Infura, Alchemy, or any other provider)
OPTIMISM_RPC = "https://mainnet.optimism.io"
# ServiceRegistry contract address and ABI
# NOTE(review): only the two functions actually called below (totalSupply,
# getService) are included; this is a minimal hand-written ABI, not the full one.
SERVICE_REGISTRY_ADDRESS = "0x3d77596beb0f130a4415df3D2D8232B3d3D31e44"
SERVICE_REGISTRY_ABI = [
# Include the ABI for the functions you need
# totalSupply() and getService(uint256)
{
"constant": True,
"inputs": [],
"name": "totalSupply",
"outputs": [{"name": "", "type": "uint256"}],
"type": "function",
},
{
"constant": True,
"inputs": [{"name": "serviceId", "type": "uint256"}],
"name": "getService",
"outputs": [
{"name": "owner", "type": "address"},
{"name": "multisig", "type": "address"},
{"name": "agentInstances", "type": "address[]"},
{"name": "threshold", "type": "uint32"},
{"name": "bonds", "type": "uint256[]"},
{"name": "agentIds", "type": "uint256[]"},
{"name": "state", "type": "uint8"},
],
"type": "function",
},
]
# Gnosis Safe ABI (for getting the nonce)
# nonce() counts executed transactions, so nonce > 0 means "has transacted".
SAFE_ABI = [
{
"constant": True,
"inputs": [],
"name": "nonce",
"outputs": [{"name": "", "type": "uint256"}],
"type": "function",
},
]
def fetch_services(agent_id=25):
    """Fetch all services from the ServiceRegistry on Optimism.

    Iterates every service id from 1 to ``totalSupply()`` and keeps only
    services whose agent-id list contains ``agent_id`` (default 25, the
    Optimus agent).

    Args:
        agent_id: agent id to filter services by (default 25 = Optimus).

    Returns:
        list[dict]: one dict per matching service with keys
        ``service_id``, ``owner``, ``multisig``, ``agent_ids``, ``state``.
    """
    w3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC))
    w3.middleware_onion.inject(geth_poa_middleware, layer=0)  # For Optimism
    service_registry = w3.eth.contract(
        address=Web3.toChecksumAddress(SERVICE_REGISTRY_ADDRESS),
        abi=SERVICE_REGISTRY_ABI,
    )
    total_services = service_registry.functions.totalSupply().call()
    print(f"Total services: {total_services}")
    services_data = []
    for service_id in range(1, total_services + 1):
        try:
            service = service_registry.functions.getService(service_id).call()
        except Exception as exc:
            # One bad/unreachable service must not abort the whole scan.
            print(f"Skipping service {service_id}: {exc}")
            continue
        owner = service[0]
        multisig = service[1]
        agent_ids = service[5]  # List of agent IDs
        state = service[6]
        # Filter for the requested agent (Optimus services have agentId == 25)
        if agent_id in agent_ids:
            # Registration block/date is not recoverable cheaply here;
            # see fetch_service_creation_dates() for the placeholder logic.
            # For accurate results, a subgraph or indexed data is recommended.
            services_data.append({
                "service_id": service_id,
                "owner": owner,
                "multisig": multisig,
                "agent_ids": agent_ids,
                "state": state,
            })
    return services_data
def fetch_service_creation_dates(services_data):
    """Attach an approximate registration date to every service (in place).

    Without a subgraph or indexer we cannot cheaply recover the true
    on-chain creation block for each service, so every entry is stamped
    with today's UTC date as a placeholder.
    TODO: replace with real creation dates once indexed data is available.

    Args:
        services_data: list of service dicts (mutated in place).

    Returns:
        The same list, with a ``registration_date`` key added to each dict.
    """
    # The original opened an RPC connection here but never used it; removed.
    # Compute the date once instead of per-iteration; timezone-aware "now"
    # replaces the deprecated datetime.utcnow().
    today_utc = datetime.now(timezone.utc).date()
    for service in services_data:
        service["registration_date"] = today_utc
    return services_data
def check_service_transactions(services_data):
    """Annotate each service with its Gnosis Safe nonce (in place).

    The Safe ``nonce()`` counts executed transactions, so ``nonce > 0``
    means the service multisig has made at least one transaction.

    Args:
        services_data: list of service dicts with a ``multisig`` key.

    Returns:
        The same list, with a ``nonce`` key added to each dict (0 when the
        multisig is not a Gnosis Safe or the RPC call fails).
    """
    w3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC))
    w3.middleware_onion.inject(geth_poa_middleware, layer=0)  # For Optimism
    for service in services_data:
        safe_contract = w3.eth.contract(
            # Checksum the address for consistency with fetch_services().
            address=Web3.toChecksumAddress(service["multisig"]),
            abi=SAFE_ABI,
        )
        try:
            service["nonce"] = safe_contract.functions.nonce().call()
        except Exception:
            # Not a Gnosis Safe (or RPC failure) — treat as "no transactions".
            service["nonce"] = 0
    return services_data
def process_services_data(services_data):
    """Turn raw service dicts into an analysis-ready DataFrame.

    Adds a ``week`` bucket (year-week of registration) and a boolean
    ``has_transaction`` flag (Safe nonce greater than zero).

    Args:
        services_data: list of service dicts with ``registration_date``
            and ``nonce`` keys.

    Returns:
        pandas.DataFrame with the extra ``week`` and ``has_transaction``
        columns.
    """
    frame = pd.DataFrame(services_data)
    # Normalize dates to pandas datetimes so .dt accessors work.
    frame["registration_date"] = pd.to_datetime(frame["registration_date"])
    # Year-week label, e.g. "2024-03" (weeks start on Monday, zero-padded).
    frame["week"] = frame["registration_date"].dt.strftime('%Y-%W')
    # A Safe with a positive nonce has executed at least one transaction.
    frame["has_transaction"] = frame["nonce"].gt(0)
    return frame
def create_services_visualizations(df):
    """Build the two week-over-week service bar charts.

    Args:
        df: DataFrame from process_services_data() with ``week`` and
            ``has_transaction`` columns.

    Returns:
        (fig_registered, fig_with_tx): plotly bar charts of weekly
        registrations and weekly services with at least one transaction.
    """
    # 1) Weekly registration counts.
    weekly_counts = df.groupby("week").size().reset_index(name="services_registered")
    fig_registered = px.bar(
        weekly_counts,
        x="week",
        y="services_registered",
        title="Number of Services Registered WoW",
    )
    # 2) Weekly counts restricted to services that have transacted.
    active_only = df[df["has_transaction"]]
    weekly_active = active_only.groupby("week").size().reset_index(name="services_with_transaction")
    fig_with_tx = px.bar(
        weekly_active,
        x="week",
        y="services_with_transaction",
        title="Number of Services with at Least One Transaction WoW",
    )
    return fig_registered, fig_with_tx
def dashboard():
    """Build the Gradio dashboard combining transaction and service metrics.

    Returns:
        gr.Blocks: the assembled (not yet launched) Gradio app.
    """
    with gr.Blocks() as demo:
        gr.Markdown("# Valory Transactions Dashboard")
        # Existing visualizations: compute ONCE and unpack, instead of
        # calling create_visualizations() five separate times (the original
        # recomputed everything per tab).
        (
            fig_tx_chain_agent,
            fig_opportunities_agent,
            fig_investment_agent,
            fig_swaps_per_day,
            fig_stats,
        ) = create_visualizations()
        with gr.Tab("Transactions per Chain per Agent"):
            gr.Plot(fig_tx_chain_agent)
        with gr.Tab("Opportunities per Agent"):
            gr.Plot(fig_opportunities_agent)
        with gr.Tab("Investment per Agent"):
            gr.Plot(fig_investment_agent)
        with gr.Tab("Swaps per Day"):
            gr.Plot(fig_swaps_per_day)
        with gr.Tab("Aggregated Metrics"):
            gr.Plot(fig_stats)
        # New service metrics: fetch and process once, shared by both tabs.
        services_data = fetch_services()
        services_data = fetch_service_creation_dates(services_data)
        services_data = check_service_transactions(services_data)
        df_services = process_services_data(services_data)
        fig_services_registered, fig_services_with_tx = create_services_visualizations(df_services)
        with gr.Tab("Services Registered WoW"):
            gr.Plot(fig_services_registered)
        with gr.Tab("Services with Transactions WoW"):
            gr.Plot(fig_services_with_tx)
    return demo
# Launch the Gradio app only when run as a script (not on import).
if __name__ == "__main__":
    dashboard().launch()