gauravlochab committed
Commit bffbc7a · Parent: 5b3ed4c

chore: add APR graph

Files changed (2)
  1. app.py +112 -108
  2. apr_visualization.py +588 -0
app.py CHANGED
@@ -5,9 +5,41 @@ import plotly.graph_objects as go
5
  import plotly.express as px
6
  from datetime import datetime, timedelta
7
  import json
8
- from web3 import Web3
 
9
  import os
10
- from app_trans_new import create_transcation_visualizations,create_active_agents_visualizations
11
  # Load environment variables from .env file
12
  # RPC URLs
13
  OPTIMISM_RPC_URL = os.getenv('OPTIMISM_RPC_URL')
@@ -41,103 +73,29 @@ for chain_name, web3_instance in web3_instances.items():
41
  raise Exception(f"Failed to connect to the {chain_name.capitalize()} network.")
42
  else:
43
  print(f"Successfully connected to the {chain_name.capitalize()} network.")
 
44
 
 
45
  def get_transfers(integrator: str, wallet: str) -> str:
46
- url = f"https://li.quest/v1/analytics/transfers?&wallet={wallet}&fromTimestamp=1726165800"
47
- headers = {"accept": "application/json"}
48
- response = requests.get(url, headers=headers)
49
- return response.json()
50
 
51
  def fetch_and_aggregate_transactions():
52
- aggregated_transactions = []
53
- daily_agent_counts = {}
54
- seen_agents = set()
55
-
56
- for chain_name, service_registry in service_registries.items():
57
- web3 = web3_instances[chain_name]
58
- total_services = service_registry.functions.totalSupply().call()
59
-
60
- for service_id in range(1, total_services + 1):
61
- service = service_registry.functions.getService(service_id).call()
62
- agent_ids = service[-1]
63
- if 40 in agent_ids or 25 in agent_ids:
64
- agent_instance_data = service_registry.functions.getAgentInstances(service_id).call()
65
- agent_addresses = agent_instance_data[1]
66
- if agent_addresses:
67
- agent_address = agent_addresses[0]
68
- response_transfers = get_transfers("valory", agent_address)
69
- transfers = response_transfers.get("transfers", [])
70
-
71
- if isinstance(transfers, list):
72
- aggregated_transactions.extend(transfers)
73
-
74
- # Track the daily number of agents
75
- current_date = ""
76
- creation_event = service_registry.events.CreateService.create_filter(from_block=0, argument_filters={'serviceId': service_id}).get_all_entries()
77
- if creation_event:
78
- block_number = creation_event[0]['blockNumber']
79
- block = web3.eth.get_block(block_number)
80
- creation_timestamp = datetime.fromtimestamp(block['timestamp'])
81
- date_str = creation_timestamp.strftime('%Y-%m-%d')
82
- current_date = date_str
83
-
84
- # Ensure each agent is only counted once based on first registered date
85
- if agent_address not in seen_agents:
86
- seen_agents.add(agent_address)
87
- if date_str not in daily_agent_counts:
88
- daily_agent_counts[date_str] = set()
89
- daily_agent_counts[date_str].add(agent_address)
90
- daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
91
- return aggregated_transactions, daily_agent_counts
92
 
93
  # Function to parse the transaction data and prepare it for visualization
94
  def process_transactions_and_agents(data):
95
- transactions, daily_agent_counts = data
96
-
97
- # Convert the data into a pandas DataFrame for easy manipulation
98
- rows = []
99
- for tx in transactions:
100
- # Normalize amounts
101
- sending_amount = float(tx["sending"]["amount"]) / (10 ** tx["sending"]["token"]["decimals"])
102
- receiving_amount = float(tx["receiving"]["amount"]) / (10 ** tx["receiving"]["token"]["decimals"])
103
-
104
- # Convert timestamps to datetime objects
105
- sending_timestamp = datetime.utcfromtimestamp(tx["sending"]["timestamp"])
106
- receiving_timestamp = datetime.utcfromtimestamp(tx["receiving"]["timestamp"])
107
-
108
- # Prepare row data
109
- rows.append({
110
- "transactionId": tx["transactionId"],
111
- "from_address": tx["fromAddress"],
112
- "to_address": tx["toAddress"],
113
- "sending_chain": tx["sending"]["chainId"],
114
- "receiving_chain": tx["receiving"]["chainId"],
115
- "sending_token_symbol": tx["sending"]["token"]["symbol"],
116
- "receiving_token_symbol": tx["receiving"]["token"]["symbol"],
117
- "sending_amount": sending_amount,
118
- "receiving_amount": receiving_amount,
119
- "sending_amount_usd": float(tx["sending"]["amountUSD"]),
120
- "receiving_amount_usd": float(tx["receiving"]["amountUSD"]),
121
- "sending_gas_used": int(tx["sending"]["gasUsed"]),
122
- "receiving_gas_used": int(tx["receiving"]["gasUsed"]),
123
- "sending_timestamp": sending_timestamp,
124
- "receiving_timestamp": receiving_timestamp,
125
- "date": sending_timestamp.date(), # Group by day
126
- "week": sending_timestamp.strftime('%Y-%m-%d') # Group by week
127
- })
128
-
129
- df_transactions = pd.DataFrame(rows)
130
- df_transactions = df_transactions.drop_duplicates()
131
- df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
132
- df_agents['date'] = pd.to_datetime(df_agents['date'])
133
- df_agents['week'] = df_agents['date'].dt.to_period('W').apply(lambda r: r.start_time)
134
-
135
- df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()
136
-
137
  return df_transactions, df_agents, df_agents_weekly
138
 
139
  # Function to create visualizations based on the metrics
140
  def create_visualizations():
 
 
141
  transactions_data = fetch_and_aggregate_transactions()
142
  df_transactions, df_agents, df_agents_weekly = process_transactions_and_agents(transactions_data)
143
 
@@ -356,31 +314,77 @@ def create_visualizations():
356
  )
357
 
358
  return fig_swaps_chain, fig_bridges_chain, fig_agents_registered,fig_tvl
359
 
360
  # Gradio interface
361
  def dashboard():
362
  with gr.Blocks() as demo:
363
- gr.Markdown("# Valory Transactions Dashboard")
364
- with gr.Tab("Chain Daily activity"):
365
- fig_tx_chain = create_transcation_visualizations()
366
- gr.Plot(fig_tx_chain)
367
-
368
- fig_swaps_chain, fig_bridges_chain, fig_agents_registered,fig_tvl = create_visualizations()
369
- with gr.Tab("Swaps Daily"):
370
- gr.Plot(fig_swaps_chain)
371
 
372
- with gr.Tab("Bridges Daily"):
373
- gr.Plot(fig_bridges_chain)
374
-
375
- with gr.Tab("Nr of Agents Registered"):
376
- gr.Plot(fig_agents_registered)
377
-
378
- with gr.Tab("DAA"):
379
- fig_agents_with_transactions_daily = create_active_agents_visualizations()
380
- gr.Plot(fig_agents_with_transactions_daily)
381
-
382
- with gr.Tab("Total Value Locked"):
383
- gr.Plot(fig_tvl)
384
 
385
  return demo
386
 
 
5
  import plotly.express as px
6
  from datetime import datetime, timedelta
7
  import json
8
+ # Commenting out blockchain-related imports that cause loading issues
9
+ # from web3 import Web3
10
  import os
11
+ import numpy as np
12
+ import matplotlib.pyplot as plt
13
+ import matplotlib.dates as mdates
14
+ import random
15
+ # Comment out the import for now and replace with dummy functions
16
+ # from app_trans_new import create_transcation_visualizations,create_active_agents_visualizations
17
+ # Import APR visualization functions from the new module
18
+ from apr_visualization import generate_apr_visualizations
19
+
20
+ # Create dummy functions for the commented out imports
21
+ def create_transcation_visualizations():
22
+ """Dummy implementation that returns a placeholder graph"""
23
+ fig = go.Figure()
24
+ fig.add_annotation(
25
+ text="Blockchain data loading disabled - placeholder visualization",
26
+ x=0.5, y=0.5, xref="paper", yref="paper",
27
+ showarrow=False, font=dict(size=20)
28
+ )
29
+ return fig
30
+
31
+ def create_active_agents_visualizations():
32
+ """Dummy implementation that returns a placeholder graph"""
33
+ fig = go.Figure()
34
+ fig.add_annotation(
35
+ text="Blockchain data loading disabled - placeholder visualization",
36
+ x=0.5, y=0.5, xref="paper", yref="paper",
37
+ showarrow=False, font=dict(size=20)
38
+ )
39
+ return fig
40
+
41
+ # Comment out the blockchain connection code
42
+ """
43
  # Load environment variables from .env file
44
  # RPC URLs
45
  OPTIMISM_RPC_URL = os.getenv('OPTIMISM_RPC_URL')
 
73
  raise Exception(f"Failed to connect to the {chain_name.capitalize()} network.")
74
  else:
75
  print(f"Successfully connected to the {chain_name.capitalize()} network.")
76
+ """
77
 
78
+ # Dummy blockchain functions to replace the commented ones
79
  def get_transfers(integrator: str, wallet: str) -> str:
80
+ """Dummy function that returns an empty result"""
81
+ return {"transfers": []}
 
 
82
 
83
  def fetch_and_aggregate_transactions():
84
+ """Dummy function that returns empty data"""
85
+ return [], {}
86
 
87
  # Function to parse the transaction data and prepare it for visualization
88
  def process_transactions_and_agents(data):
89
+ """Dummy function that returns empty dataframes"""
90
+ df_transactions = pd.DataFrame()
91
+ df_agents = pd.DataFrame(columns=['date', 'agent_count'])
92
+ df_agents_weekly = pd.DataFrame()
93
  return df_transactions, df_agents, df_agents_weekly
94
 
95
  # Function to create visualizations based on the metrics
96
  def create_visualizations():
97
+ """
98
+ # Commenting out the original visualization code temporarily for debugging
99
  transactions_data = fetch_and_aggregate_transactions()
100
  df_transactions, df_agents, df_agents_weekly = process_transactions_and_agents(transactions_data)
101
 
 
314
  )
315
 
316
  return fig_swaps_chain, fig_bridges_chain, fig_agents_registered,fig_tvl
317
+ """
318
+ # Placeholder figures for testing
319
+ fig_swaps_chain = go.Figure()
320
+ fig_swaps_chain.add_annotation(
321
+ text="Blockchain data loading disabled - placeholder visualization",
322
+ x=0.5, y=0.5, xref="paper", yref="paper",
323
+ showarrow=False, font=dict(size=20)
324
+ )
325
+
326
+ fig_bridges_chain = go.Figure()
327
+ fig_bridges_chain.add_annotation(
328
+ text="Blockchain data loading disabled - placeholder visualization",
329
+ x=0.5, y=0.5, xref="paper", yref="paper",
330
+ showarrow=False, font=dict(size=20)
331
+ )
332
+
333
+ fig_agents_registered = go.Figure()
334
+ fig_agents_registered.add_annotation(
335
+ text="Blockchain data loading disabled - placeholder visualization",
336
+ x=0.5, y=0.5, xref="paper", yref="paper",
337
+ showarrow=False, font=dict(size=20)
338
+ )
339
+
340
+ fig_tvl = go.Figure()
341
+ fig_tvl.add_annotation(
342
+ text="Blockchain data loading disabled - placeholder visualization",
343
+ x=0.5, y=0.5, xref="paper", yref="paper",
344
+ showarrow=False, font=dict(size=20)
345
+ )
346
+
347
+ return fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_tvl
348
 
349
  # Gradio interface
350
  def dashboard():
351
  with gr.Blocks() as demo:
352
+ gr.Markdown("# Valory APR Metrics")
353
 
354
+ # APR Metrics tab - the only tab
355
+ with gr.Tab("APR Metrics"):
356
+ with gr.Column():
357
+ refresh_btn = gr.Button("Refresh APR Data")
358
+
359
+ # Create containers for plotly figures
360
+ per_agent_graph = gr.Plot(label="APR Per Agent")
361
+ combined_graph = gr.Plot(label="Combined APR (All Agents)")
362
+
363
+ # Function to update both graphs
364
+ def update_apr_graphs():
365
+ # Generate visualizations and get figure objects directly
366
+ per_agent_fig, combined_fig, _ = generate_apr_visualizations()
367
+ return per_agent_fig, combined_fig
368
+
369
+ # Set up the button click event
370
+ refresh_btn.click(
371
+ fn=update_apr_graphs,
372
+ inputs=[],
373
+ outputs=[per_agent_graph, combined_graph]
374
+ )
375
+
376
+ # Initialize the graphs on load
377
+ # We'll use placeholder figures initially
378
+ import plotly.graph_objects as go
379
+ placeholder_fig = go.Figure()
380
+ placeholder_fig.add_annotation(
381
+ text="Click 'Refresh APR Data' to load APR graphs",
382
+ x=0.5, y=0.5,
383
+ showarrow=False,
384
+ font=dict(size=15)
385
+ )
386
+ per_agent_graph.value = placeholder_fig
387
+ combined_graph.value = placeholder_fig
388
 
389
  return demo
390
 
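The hunks above rebuild dashboard() around a single "APR Metrics" tab; the call that actually serves the app sits outside the changed lines. A minimal launch sketch, assuming the usual Gradio entry point (not shown in this diff):

    # Hypothetical entry point: not part of this hunk, shown only for context.
    if __name__ == "__main__":
        demo = dashboard()   # builds the Blocks UI defined above
        demo.launch()        # Gradio's standard launcher; default host/port assumed
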
apr_visualization.py ADDED
@@ -0,0 +1,588 @@
1
+ import pandas as pd
2
+ import numpy as np
3
+ import matplotlib.pyplot as plt
4
+ import matplotlib.dates as mdates
5
+ import plotly.graph_objects as go
6
+ import plotly.express as px
7
+ from plotly.subplots import make_subplots
8
+ import random
9
+ from datetime import datetime, timedelta
10
+ import requests
11
+ import sys
12
+ import json
13
+ from typing import List, Dict, Any
14
+
15
+ # Global variable to store the data for reuse
16
+ global_df = None
17
+
18
+ # Configuration
19
+ API_BASE_URL = "http://65.0.131.34:8000"
20
+
21
+ def get_agent_type_by_name(type_name: str) -> Dict[str, Any]:
22
+ """Get agent type by name"""
23
+ response = requests.get(f"{API_BASE_URL}/api/agent-types/name/{type_name}")
24
+ if response.status_code == 404:
25
+ print(f"Error: Agent type '{type_name}' not found")
26
+ return None
27
+ response.raise_for_status()
28
+ return response.json()
29
+
30
+ def get_attribute_definition_by_name(attr_name: str) -> Dict[str, Any]:
31
+ """Get attribute definition by name"""
32
+ response = requests.get(f"{API_BASE_URL}/api/attributes/name/{attr_name}")
33
+ if response.status_code == 404:
34
+ print(f"Error: Attribute definition '{attr_name}' not found")
35
+ return None
36
+ response.raise_for_status()
37
+ return response.json()
38
+
39
+ def get_agents_by_type(type_id: int) -> List[Dict[str, Any]]:
40
+ """Get all agents of a specific type"""
41
+ response = requests.get(f"{API_BASE_URL}/api/agent-types/{type_id}/agents/")
42
+ if response.status_code == 404:
43
+ print(f"No agents found for type ID {type_id}")
44
+ return []
45
+ response.raise_for_status()
46
+ return response.json()
47
+
48
+ def get_attribute_values_by_type_and_attr(agents: List[Dict[str, Any]], attr_def_id: int) -> List[Dict[str, Any]]:
49
+ """Get all attribute values for a specific attribute definition across all agents of a given list"""
50
+ all_attributes = []
51
+
52
+ # For each agent, get their attributes and filter for the one we want
53
+ for agent in agents:
54
+ agent_id = agent["agent_id"]
55
+
56
+ # Call the /api/agents/{agent_id}/attributes/ endpoint
57
+ response = requests.get(f"{API_BASE_URL}/api/agents/{agent_id}/attributes/", params={"limit": 1000})
58
+ if response.status_code == 404:
59
+ print(f"No attributes found for agent ID {agent_id}")
60
+ continue
61
+
62
+ try:
63
+ response.raise_for_status()
64
+ agent_attrs = response.json()
65
+
66
+ # Filter for the specific attribute definition ID
67
+ filtered_attrs = [attr for attr in agent_attrs if attr.get("attr_def_id") == attr_def_id]
68
+ all_attributes.extend(filtered_attrs)
69
+ except requests.exceptions.RequestException as e:
70
+ print(f"Error fetching attributes for agent ID {agent_id}: {e}")
71
+
72
+ return all_attributes
73
+
74
+ def get_agent_name(agent_id: int, agents: List[Dict[str, Any]]) -> str:
75
+ """Get agent name from agent ID"""
76
+ for agent in agents:
77
+ if agent["agent_id"] == agent_id:
78
+ return agent["agent_name"]
79
+ return "Unknown"
80
+
81
+ def extract_apr_value(attr: Dict[str, Any]) -> Dict[str, Any]:
82
+ """Extract APR value and timestamp from JSON value"""
83
+ try:
84
+ # The APR value is stored in the json_value field
85
+ if attr["json_value"] is None:
86
+ return {"apr": None, "timestamp": None, "agent_id": attr["agent_id"], "is_dummy": False}
87
+
88
+ # If json_value is a string, parse it
89
+ if isinstance(attr["json_value"], str):
90
+ json_data = json.loads(attr["json_value"])
91
+ else:
92
+ json_data = attr["json_value"]
93
+
94
+ apr = json_data.get("apr")
95
+ timestamp = json_data.get("timestamp")
96
+
97
+ # Convert timestamp to datetime if it exists
98
+ timestamp_dt = None
99
+ if timestamp:
100
+ timestamp_dt = datetime.fromtimestamp(timestamp)
101
+
102
+ return {"apr": apr, "timestamp": timestamp_dt, "agent_id": attr["agent_id"], "is_dummy": False}
103
+ except (json.JSONDecodeError, KeyError, TypeError) as e:
104
+ print(f"Error parsing JSON value: {e}")
105
+ return {"apr": None, "timestamp": None, "agent_id": attr["agent_id"], "is_dummy": False}
106
+
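For orientation, extract_apr_value above expects each attribute's json_value to carry an "apr" number and a Unix "timestamp". A record shaped roughly like the following would parse cleanly (field names follow the parser above; the values are illustrative, not taken from live data):

    # Illustrative attribute record for extract_apr_value; values are made up.
    attr = {"agent_id": 12, "attr_def_id": 3, "json_value": {"apr": 7.5, "timestamp": 1741046400}}
    # extract_apr_value(attr) -> {"apr": 7.5, "timestamp": <datetime parsed from the Unix timestamp>,
    #                             "agent_id": 12, "is_dummy": False}
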
107
+ def fetch_apr_data_from_db():
108
+ """
109
+ Fetch APR data from database using the API.
110
+ """
111
+ global global_df
112
+
113
+ try:
114
+ # Step 1: Find the Modius agent type
115
+ modius_type = get_agent_type_by_name("Modius")
116
+ if not modius_type:
117
+ print("Modius agent type not found, using placeholder data")
118
+ global_df = pd.DataFrame([])
119
+ return global_df
120
+
121
+ type_id = modius_type["type_id"]
122
+
123
+ # Step 2: Find the APR attribute definition
124
+ apr_attr_def = get_attribute_definition_by_name("APR")
125
+ if not apr_attr_def:
126
+ print("APR attribute definition not found, using placeholder data")
127
+ global_df = pd.DataFrame([])
128
+ return global_df
129
+
130
+ attr_def_id = apr_attr_def["attr_def_id"]
131
+
132
+ # Step 3: Get all agents of type Modius
133
+ modius_agents = get_agents_by_type(type_id)
134
+ if not modius_agents:
135
+ print("No agents of type 'Modius' found")
136
+ global_df = pd.DataFrame([])
137
+ return global_df
138
+
139
+ # Step 4: Fetch all APR values for Modius agents
140
+ apr_attributes = get_attribute_values_by_type_and_attr(modius_agents, attr_def_id)
141
+ if not apr_attributes:
142
+ print("No APR values found for 'Modius' agents")
143
+ global_df = pd.DataFrame([])
144
+ return global_df
145
+
146
+ # Step 5: Extract APR data
147
+ apr_data_list = []
148
+ for attr in apr_attributes:
149
+ apr_data = extract_apr_value(attr)
150
+ if apr_data["apr"] is not None and apr_data["timestamp"] is not None:
151
+ # Get agent name
152
+ agent_name = get_agent_name(attr["agent_id"], modius_agents)
153
+ # Add agent name to the data
154
+ apr_data["agent_name"] = agent_name
155
+ # Add is_dummy flag (all real data)
156
+ apr_data["is_dummy"] = False
157
+
158
+ # Mark negative values as "Performance" metrics
159
+ if apr_data["apr"] < 0:
160
+ apr_data["metric_type"] = "Performance"
161
+ else:
162
+ apr_data["metric_type"] = "APR"
163
+
164
+ apr_data_list.append(apr_data)
165
+
166
+ # Convert list of dictionaries to DataFrame
167
+ if not apr_data_list:
168
+ print("No valid APR data extracted")
169
+ global_df = pd.DataFrame([])
170
+ return global_df
171
+
172
+ global_df = pd.DataFrame(apr_data_list)
173
+ return global_df
174
+
175
+ except requests.exceptions.RequestException as e:
176
+ print(f"API request error: {e}")
177
+ global_df = pd.DataFrame([])
178
+ return global_df
179
+ except Exception as e:
180
+ print(f"Error fetching APR data: {e}")
181
+ global_df = pd.DataFrame([])
182
+ return global_df
183
+
184
+ def generate_apr_visualizations():
185
+ """Generate APR visualizations with real data only (no dummy data)"""
186
+ global global_df
187
+
188
+ # Fetch data from database
189
+ df = fetch_apr_data_from_db()
190
+
191
+ # If we got no data at all, return placeholder figures
192
+ if df.empty:
193
+ print("No APR data available. Using fallback visualization.")
194
+ # Create empty visualizations with a message using Plotly
195
+ fig = go.Figure()
196
+ fig.add_annotation(
197
+ x=0.5, y=0.5,
198
+ text="No APR data available",
199
+ font=dict(size=20),
200
+ showarrow=False
201
+ )
202
+ fig.update_layout(
203
+ xaxis=dict(showgrid=False, zeroline=False, showticklabels=False),
204
+ yaxis=dict(showgrid=False, zeroline=False, showticklabels=False)
205
+ )
206
+
207
+ # Save as static files for reference
208
+ fig.write_html("modius_apr_per_agent_graph.html")
209
+ fig.write_image("modius_apr_per_agent_graph.png")
210
+ fig.write_html("modius_apr_combined_graph.html")
211
+ fig.write_image("modius_apr_combined_graph.png")
212
+
213
+ csv_file = None
214
+ return fig, fig, csv_file
215
+
216
+ # No longer generating dummy data
217
+ # Set global_df for access by other functions
218
+ global_df = df
219
+
220
+ # Save to CSV before creating visualizations
221
+ csv_file = save_to_csv(df)
222
+
223
+ # Create per-agent time series graph (returns figure object)
224
+ per_agent_fig = create_time_series_graph_per_agent(df)
225
+
226
+ # Create combined time series graph (returns figure object)
227
+ combined_fig = create_combined_time_series_graph(df)
228
+
229
+ return per_agent_fig, combined_fig, csv_file
230
+
231
+ def create_time_series_graph_per_agent(df):
232
+ """Create a time series graph for each agent using Plotly"""
233
+ # Get unique agents
234
+ unique_agents = df['agent_id'].unique()
235
+
236
+ if len(unique_agents) == 0:
237
+ print("No agent data to plot")
238
+ fig = go.Figure()
239
+ fig.add_annotation(
240
+ text="No agent data available",
241
+ x=0.5, y=0.5,
242
+ showarrow=False, font=dict(size=20)
243
+ )
244
+ return fig
245
+
246
+ # Create a subplot figure for each agent
247
+ fig = make_subplots(rows=len(unique_agents), cols=1,
248
+ subplot_titles=[f"Agent: {df[df['agent_id'] == agent_id]['agent_name'].iloc[0]}"
249
+ for agent_id in unique_agents],
250
+ vertical_spacing=0.1)
251
+
252
+ # Plot data for each agent
253
+ for i, agent_id in enumerate(unique_agents):
254
+ agent_data = df[df['agent_id'] == agent_id].copy()
255
+ agent_name = agent_data['agent_name'].iloc[0]
256
+ row = i + 1
257
+
258
+ # Add zero line to separate APR and Performance
259
+ fig.add_shape(
260
+ type="line", line=dict(dash="solid", width=1.5, color="black"),
261
+ y0=0, y1=0, x0=agent_data['timestamp'].min(), x1=agent_data['timestamp'].max(),
262
+ row=row, col=1
263
+ )
264
+
265
+ # Add background colors
266
+ fig.add_shape(
267
+ type="rect", fillcolor="rgba(230, 243, 255, 0.3)", line=dict(width=0),
268
+ y0=0, y1=1000, x0=agent_data['timestamp'].min(), x1=agent_data['timestamp'].max(),
269
+ row=row, col=1, layer="below"
270
+ )
271
+ fig.add_shape(
272
+ type="rect", fillcolor="rgba(255, 230, 230, 0.3)", line=dict(width=0),
273
+ y0=-1000, y1=0, x0=agent_data['timestamp'].min(), x1=agent_data['timestamp'].max(),
274
+ row=row, col=1, layer="below"
275
+ )
276
+
277
+ # Create separate dataframes for different data types
278
+ apr_data = agent_data[agent_data['metric_type'] == 'APR']
279
+ perf_data = agent_data[agent_data['metric_type'] == 'Performance']
280
+
281
+ # Sort all data by timestamp for the line plots
282
+ combined_agent_data = agent_data.sort_values('timestamp')
283
+
284
+ # Add main line connecting all points
285
+ fig.add_trace(
286
+ go.Scatter(
287
+ x=combined_agent_data['timestamp'],
288
+ y=combined_agent_data['apr'],
289
+ mode='lines',
290
+ line=dict(color='purple', width=2),
291
+ name=f'{agent_name}',
292
+ legendgroup=agent_name,
293
+ showlegend=(i == 0), # Only show in legend once
294
+ hovertemplate='Time: %{x}<br>Value: %{y:.2f}<extra></extra>'
295
+ ),
296
+ row=row, col=1
297
+ )
298
+
299
+ # Add scatter points for APR values
300
+ if not apr_data.empty:
301
+ fig.add_trace(
302
+ go.Scatter(
303
+ x=apr_data['timestamp'],
304
+ y=apr_data['apr'],
305
+ mode='markers',
306
+ marker=dict(color='blue', size=10, symbol='circle'),
307
+ name='APR',
308
+ legendgroup='APR',
309
+ showlegend=(i == 0),
310
+ hovertemplate='Time: %{x}<br>APR: %{y:.2f}<extra></extra>'
311
+ ),
312
+ row=row, col=1
313
+ )
314
+
315
+ # Add scatter points for Performance values
316
+ if not perf_data.empty:
317
+ fig.add_trace(
318
+ go.Scatter(
319
+ x=perf_data['timestamp'],
320
+ y=perf_data['apr'],
321
+ mode='markers',
322
+ marker=dict(color='red', size=10, symbol='square'),
323
+ name='Performance',
324
+ legendgroup='Performance',
325
+ showlegend=(i == 0),
326
+ hovertemplate='Time: %{x}<br>Performance: %{y:.2f}<extra></extra>'
327
+ ),
328
+ row=row, col=1
329
+ )
330
+
331
+ # Update axes
332
+ fig.update_xaxes(title_text="Time", row=row, col=1)
333
+ fig.update_yaxes(title_text="Value", row=row, col=1, gridcolor='rgba(0,0,0,0.1)')
334
+
335
+ # Update layout
336
+ fig.update_layout(
337
+ height=400 * len(unique_agents),
338
+ width=1000,
339
+ title_text="APR and Performance Values per Agent",
340
+ template="plotly_white",
341
+ legend=dict(
342
+ orientation="h",
343
+ yanchor="bottom",
344
+ y=1.02,
345
+ xanchor="right",
346
+ x=1
347
+ ),
348
+ margin=dict(r=20, l=20, t=30, b=20),
349
+ hovermode="closest"
350
+ )
351
+
352
+ # Save the figure (still useful for reference)
353
+ graph_file = "modius_apr_per_agent_graph.html"
354
+ fig.write_html(graph_file, include_plotlyjs='cdn', full_html=False)
355
+
356
+ # Also save as image for compatibility
357
+ img_file = "modius_apr_per_agent_graph.png"
358
+ fig.write_image(img_file)
359
+
360
+ print(f"Per-agent graph saved to {graph_file} and {img_file}")
361
+
362
+ # Return the figure object for direct use in Gradio
363
+ return fig
364
+
365
+ def create_combined_time_series_graph(df):
366
+ """Create a combined time series graph for all agents using Plotly"""
367
+ if len(df) == 0:
368
+ print("No data to plot combined graph")
369
+ fig = go.Figure()
370
+ fig.add_annotation(
371
+ text="No data available",
372
+ x=0.5, y=0.5,
373
+ showarrow=False, font=dict(size=20)
374
+ )
375
+ return fig
376
+
377
+ # Create Plotly figure
378
+ fig = go.Figure()
379
+
380
+ # Get unique agents
381
+ unique_agents = df['agent_id'].unique()
382
+
383
+ # Define a color scale for different agents
384
+ colors = px.colors.qualitative.Plotly[:len(unique_agents)]
385
+
386
+ # Add background shapes for APR and Performance regions
387
+ min_time = df['timestamp'].min()
388
+ max_time = df['timestamp'].max()
389
+
390
+ # Add shape for APR region (above zero)
391
+ fig.add_shape(
392
+ type="rect",
393
+ fillcolor="rgba(230, 243, 255, 0.3)",
394
+ line=dict(width=0),
395
+ y0=0, y1=1000,
396
+ x0=min_time, x1=max_time,
397
+ layer="below"
398
+ )
399
+
400
+ # Add shape for Performance region (below zero)
401
+ fig.add_shape(
402
+ type="rect",
403
+ fillcolor="rgba(255, 230, 230, 0.3)",
404
+ line=dict(width=0),
405
+ y0=-1000, y1=0,
406
+ x0=min_time, x1=max_time,
407
+ layer="below"
408
+ )
409
+
410
+ # Add zero line
411
+ fig.add_shape(
412
+ type="line",
413
+ line=dict(dash="solid", width=1.5, color="black"),
414
+ y0=0, y1=0,
415
+ x0=min_time, x1=max_time
416
+ )
417
+
418
+ # Add data for each agent
419
+ for i, agent_id in enumerate(unique_agents):
420
+ agent_data = df[df['agent_id'] == agent_id].copy()
421
+ agent_name = agent_data['agent_name'].iloc[0]
422
+ color = colors[i % len(colors)]
423
+
424
+ # Sort the data by timestamp
425
+ agent_data = agent_data.sort_values('timestamp')
426
+
427
+ # Add the combined line for both APR and Performance
428
+ fig.add_trace(
429
+ go.Scatter(
430
+ x=agent_data['timestamp'],
431
+ y=agent_data['apr'],
432
+ mode='lines',
433
+ line=dict(color=color, width=2),
434
+ name=f'{agent_name}',
435
+ legendgroup=agent_name,
436
+ hovertemplate='Time: %{x}<br>Value: %{y:.2f}<br>Agent: ' + agent_name + '<extra></extra>'
437
+ )
438
+ )
439
+
440
+ # Add scatter points for APR values
441
+ apr_data = agent_data[agent_data['metric_type'] == 'APR']
442
+ if not apr_data.empty:
443
+ fig.add_trace(
444
+ go.Scatter(
445
+ x=apr_data['timestamp'],
446
+ y=apr_data['apr'],
447
+ mode='markers',
448
+ marker=dict(color=color, symbol='circle', size=8),
449
+ name=f'{agent_name} APR',
450
+ legendgroup=agent_name,
451
+ showlegend=False,
452
+ hovertemplate='Time: %{x}<br>APR: %{y:.2f}<br>Agent: ' + agent_name + '<extra></extra>'
453
+ )
454
+ )
455
+
456
+ # Add scatter points for Performance values
457
+ perf_data = agent_data[agent_data['metric_type'] == 'Performance']
458
+ if not perf_data.empty:
459
+ fig.add_trace(
460
+ go.Scatter(
461
+ x=perf_data['timestamp'],
462
+ y=perf_data['apr'],
463
+ mode='markers',
464
+ marker=dict(color=color, symbol='square', size=8),
465
+ name=f'{agent_name} Perf',
466
+ legendgroup=agent_name,
467
+ showlegend=False,
468
+ hovertemplate='Time: %{x}<br>Performance: %{y:.2f}<br>Agent: ' + agent_name + '<extra></extra>'
469
+ )
470
+ )
471
+
472
+ # Update layout
473
+ fig.update_layout(
474
+ title="APR and Performance Values for All Agents",
475
+ xaxis_title="Time",
476
+ yaxis_title="Value",
477
+ template="plotly_white",
478
+ height=600,
479
+ width=1000,
480
+ legend=dict(
481
+ orientation="h",
482
+ yanchor="bottom",
483
+ y=1.02,
484
+ xanchor="right",
485
+ x=1,
486
+ groupclick="toggleitem"
487
+ ),
488
+ margin=dict(r=20, l=20, t=30, b=20),
489
+ hovermode="closest"
490
+ )
491
+
492
+ # Update axes
493
+ fig.update_xaxes(showgrid=True, gridwidth=1, gridcolor='rgba(0,0,0,0.1)')
494
+ fig.update_yaxes(showgrid=True, gridwidth=1, gridcolor='rgba(0,0,0,0.1)')
495
+
496
+ # Save the figure (still useful for reference)
497
+ graph_file = "modius_apr_combined_graph.html"
498
+ fig.write_html(graph_file, include_plotlyjs='cdn', full_html=False)
499
+
500
+ # Also save as image for compatibility
501
+ img_file = "modius_apr_combined_graph.png"
502
+ fig.write_image(img_file)
503
+
504
+ print(f"Combined graph saved to {graph_file} and {img_file}")
505
+
506
+ # Return the figure object for direct use in Gradio
507
+ return fig
508
+
509
+ def save_to_csv(df):
510
+ """Save the APR data DataFrame to a CSV file and return the file path"""
511
+ if df.empty:
512
+ print("No APR data to save to CSV")
513
+ return None
514
+
515
+ # Define the CSV file path
516
+ csv_file = "modius_apr_values.csv"
517
+
518
+ # Save to CSV
519
+ df.to_csv(csv_file, index=False)
520
+ print(f"APR data saved to {csv_file}")
521
+
522
+ # Also generate a statistics CSV file
523
+ stats_df = generate_statistics_from_data(df)
524
+ stats_csv = "modius_apr_statistics.csv"
525
+ stats_df.to_csv(stats_csv, index=False)
526
+ print(f"Statistics saved to {stats_csv}")
527
+
528
+ return csv_file
529
+
530
+ def generate_statistics_from_data(df):
531
+ """Generate statistics from the APR data"""
532
+ if df.empty:
533
+ return pd.DataFrame()
534
+
535
+ # Get unique agents
536
+ unique_agents = df['agent_id'].unique()
537
+ stats_list = []
538
+
539
+ # Generate per-agent statistics
540
+ for agent_id in unique_agents:
541
+ agent_data = df[df['agent_id'] == agent_id]
542
+ agent_name = agent_data['agent_name'].iloc[0]
543
+
544
+ # APR statistics
545
+ apr_data = agent_data[agent_data['metric_type'] == 'APR']
546
+ real_apr = apr_data[apr_data['is_dummy'] == False]
547
+
548
+ # Performance statistics
549
+ perf_data = agent_data[agent_data['metric_type'] == 'Performance']
550
+ real_perf = perf_data[perf_data['is_dummy'] == False]
551
+
552
+ stats = {
553
+ 'agent_id': agent_id,
554
+ 'agent_name': agent_name,
555
+ 'total_points': len(agent_data),
556
+ 'apr_points': len(apr_data),
557
+ 'performance_points': len(perf_data),
558
+ 'real_apr_points': len(real_apr),
559
+ 'real_performance_points': len(real_perf),
560
+ 'avg_apr': apr_data['apr'].mean() if not apr_data.empty else None,
561
+ 'avg_performance': perf_data['apr'].mean() if not perf_data.empty else None,
562
+ 'max_apr': apr_data['apr'].max() if not apr_data.empty else None,
563
+ 'min_apr': apr_data['apr'].min() if not apr_data.empty else None,
564
+ 'latest_timestamp': agent_data['timestamp'].max().strftime('%Y-%m-%d %H:%M:%S') if not agent_data.empty else None
565
+ }
566
+ stats_list.append(stats)
567
+
568
+ # Generate overall statistics
569
+ apr_only = df[df['metric_type'] == 'APR']
570
+ perf_only = df[df['metric_type'] == 'Performance']
571
+
572
+ overall_stats = {
573
+ 'agent_id': 'ALL',
574
+ 'agent_name': 'All Agents',
575
+ 'total_points': len(df),
576
+ 'apr_points': len(apr_only),
577
+ 'performance_points': len(perf_only),
578
+ 'real_apr_points': len(apr_only[apr_only['is_dummy'] == False]),
579
+ 'real_performance_points': len(perf_only[perf_only['is_dummy'] == False]),
580
+ 'avg_apr': apr_only['apr'].mean() if not apr_only.empty else None,
581
+ 'avg_performance': perf_only['apr'].mean() if not perf_only.empty else None,
582
+ 'max_apr': apr_only['apr'].max() if not apr_only.empty else None,
583
+ 'min_apr': apr_only['apr'].min() if not apr_only.empty else None,
584
+ 'latest_timestamp': df['timestamp'].max().strftime('%Y-%m-%d %H:%M:%S') if not df.empty else None
585
+ }
586
+ stats_list.append(overall_stats)
587
+
588
+ return pd.DataFrame(stats_list)
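
Taken together, the new module can also be exercised outside Gradio; a minimal standalone sketch, assuming the attribute API behind API_BASE_URL is reachable:

    # Standalone usage sketch (assumes the API at API_BASE_URL is up and serving Modius data).
    from apr_visualization import generate_apr_visualizations

    per_agent_fig, combined_fig, csv_file = generate_apr_visualizations()
    per_agent_fig.show()                  # or hand the figures to gr.Plot, as app.py now does
    print("CSV written to:", csv_file)    # None when no APR data could be fetched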