Commit: visualize the analytics

clients.py CHANGED (+55, -58)
Before the change (removed lines are prefixed with -; a bare - marks a removed line whose text is not shown):

@@ -1,8 +1,11 @@
import os
import streamlit as st
import pandas as pd
import requests
import datetime
cold_host = os.getenv("backend_cold")
hook_host = os.getenv("hook_host")  # Corrected here
rengagment_host = os.getenv("rengagement_host")

@@ -348,27 +351,30 @@ def RengagmentEmail(email_receiver):
    return None


-def fetch_analytics_data(host, start_date=None, end_date=None):
    """
    Fetches analytics data from the specified host, processes it, and aggregates by week.

    Parameters:
        host (str): The host URL to query.
-
-

    Returns:
-        pd.DataFrame: The processed and aggregated analytics data.
    """
    endpoint = f"{host}/analytics/"
    response = requests.get(endpoint)

    if response.status_code != 200:
-        #st.error(f"Failed to fetch analytics data from {host}")
        return pd.DataFrame()

    data = response.json()
-    #st.write(data)

    # Process the fetched data
    records = []

@@ -382,77 +388,68 @@ def fetch_analytics_data(host, start_date=None, end_date=None):

    df = pd.DataFrame(records)

-    # Debug: Display the DataFrame structure
-    #st.write("Fetched DataFrame structure:", df.head())
-
-    # Check if DataFrame is empty
    if df.empty:
-        #st.warning("No data available for the specified date range.")
        return df

-    #
-
-    df = df[df['timestamp'] >= pd.to_datetime(start_date)]
-    if end_date:
-        df = df[df['timestamp'] <= pd.to_datetime(end_date)]

-    #
-    df['
-

-
-
-

def display_analytics():
    """
-    Displays the analytics data with a time filter.
    """
    cold_host = os.getenv("backend_cold")
    rengagement_host = os.getenv("rengagement_host")
    hook_host = os.getenv("hook_host")

-    # Calculate the start and end of
    today = datetime.date.today()
-
-

-
-
-
-    end_date = st.date_input("End Date", value=end_of_week)

-
-
-

-    #
-    def
-    if df.empty
-
-        return

-
-
-
-

-
-
-    hook_df = calculate_percentage_change(hook_df)
-
-    # Display dataframes side by side
-    #col1, col2, col3 = st.columns(3)
-
-    #with col1:
-    st.html('<h4 class="hero-subtitle">Cold Emails</h4>')
-    st.dataframe(cold_df)
-    #with col2:
-    st.html('<h4 class="hero-subtitle">Re-engagement Hooks "Hook2Lead"</h4>')
-    st.dataframe(hook_df)

-
-
-    st.dataframe(rengagement_df)
After the change (added lines are prefixed with +):

@@ -1,8 +1,11 @@
import os
+from dotenv import load_dotenv
+load_dotenv()
import streamlit as st
import pandas as pd
import requests
import datetime
+import plotly.express as px
cold_host = os.getenv("backend_cold")
hook_host = os.getenv("hook_host")  # Corrected here
rengagment_host = os.getenv("rengagement_host")
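The commit pulls in python-dotenv and calls load_dotenv() before the os.getenv calls, so the three backend hosts can also be read from a local .env file, which is handy for local runs. A minimal sketch; the URLs are placeholders, not the real deployments:

# .env (example values only; real URLs are deployment-specific)
backend_cold=http://localhost:8001
hook_host=http://localhost:8002
rengagement_host=http://localhost:8003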
@@ -348,27 +351,30 @@ def RengagmentEmail(email_receiver):
    return None


+
+
+
+def fetch_analytics_data(host, start_of_last_week, end_of_last_week, start_of_this_week, end_of_this_week):
    """
    Fetches analytics data from the specified host, processes it, and aggregates by week.

    Parameters:
        host (str): The host URL to query.
+        start_of_last_week (datetime): The start date of last week.
+        end_of_last_week (datetime): The end date of last week.
+        start_of_this_week (datetime): The start date of this week.
+        end_of_this_week (datetime): The end date of this week.

    Returns:
+        pd.DataFrame: The processed and aggregated analytics data for this week and last week.
    """
    endpoint = f"{host}/analytics/"
    response = requests.get(endpoint)

    if response.status_code != 200:
        return pd.DataFrame()

    data = response.json()

    # Process the fetched data
    records = []

@@ -382,77 +388,68 @@ def fetch_analytics_data(host, start_date=None, end_date=None):

    df = pd.DataFrame(records)

    if df.empty:
        return df

+    # Convert timestamp to datetime.date
+    df['day'] = df['timestamp'].dt.date

+    # Filter by date range
+    this_week_df = df[(df['day'] >= start_of_this_week) & (df['day'] <= end_of_this_week)]
+    this_week_df = this_week_df.groupby('email_receiver').agg({'Total Companies/Executives engaged': 'sum'}).reset_index()

+    last_week_df = df[(df['day'] >= start_of_last_week) & (df['day'] <= end_of_last_week)]
+    last_week_df = last_week_df.groupby('email_receiver').agg({'Total Companies/Executives engaged': 'sum'}).reset_index()
+
+    # Combine this week and last week data
+    this_week_df['Week'] = 'This Week'
+    last_week_df['Week'] = 'Last Week'
+    combined_df = pd.concat([this_week_df, last_week_df])
+
+    return combined_df
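As a quick illustration of what the new fetch_analytics_data returns, here is a minimal sketch of the same groupby/concat aggregation on a toy DataFrame; the sample rows, receivers, and dates are invented, but the column names match the code above:

import datetime
import pandas as pd

# Invented sample records shaped like the fields the function relies on.
df = pd.DataFrame({
    'timestamp': pd.to_datetime(['2024-05-06', '2024-05-07', '2024-05-14']),
    'email_receiver': ['alice@example.com', 'alice@example.com', 'bob@example.com'],
    'Total Companies/Executives engaged': [3, 2, 5],
})
df['day'] = df['timestamp'].dt.date

# Week boundaries chosen so the first two rows fall in "last week" (Mon-Sun windows).
start_of_this_week, end_of_this_week = datetime.date(2024, 5, 13), datetime.date(2024, 5, 19)
start_of_last_week, end_of_last_week = datetime.date(2024, 5, 6), datetime.date(2024, 5, 12)

this_week = df[(df['day'] >= start_of_this_week) & (df['day'] <= end_of_this_week)] \
    .groupby('email_receiver').agg({'Total Companies/Executives engaged': 'sum'}).reset_index()
this_week['Week'] = 'This Week'
last_week = df[(df['day'] >= start_of_last_week) & (df['day'] <= end_of_last_week)] \
    .groupby('email_receiver').agg({'Total Companies/Executives engaged': 'sum'}).reset_index()
last_week['Week'] = 'Last Week'

print(pd.concat([this_week, last_week]))
# bob@example.com sums to 5 under "This Week"; alice@example.com sums to 5 under "Last Week".

The 'Week' column added here is what lets the grouped bar chart in display_analytics below compare the two weeks side by side for each receiver.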
def display_analytics():
    """
+    Displays the analytics data with a time filter and plots.
    """
    cold_host = os.getenv("backend_cold")
    rengagement_host = os.getenv("rengagement_host")
    hook_host = os.getenv("hook_host")

+    # Calculate the start and end of this week and last week
    today = datetime.date.today()
+    start_of_this_week = today - datetime.timedelta(days=today.weekday())
+    end_of_this_week = start_of_this_week + datetime.timedelta(days=6)
+    start_of_last_week = start_of_this_week - datetime.timedelta(days=7)
+    end_of_last_week = start_of_last_week + datetime.timedelta(days=6)

+    cold_df = fetch_analytics_data(cold_host, start_of_last_week, end_of_last_week, start_of_this_week, end_of_this_week)
+    rengagement_df = fetch_analytics_data(rengagement_host, start_of_last_week, end_of_last_week, start_of_this_week, end_of_this_week)
+    hook_df = fetch_analytics_data(hook_host, start_of_last_week, end_of_last_week, start_of_this_week, end_of_this_week)

+    # Calculate the total count of all Total Companies/Executives engaged
+    total_engaged = (cold_df['Total Companies/Executives engaged'].sum() if not cold_df.empty else 0) + \
+                    (rengagement_df['Total Companies/Executives engaged'].sum() if not rengagement_df.empty else 0) + \
+                    (hook_df['Total Companies/Executives engaged'].sum() if not hook_df.empty else 0)

+    # Plotting the data
+    def plot_data(df, title):
+        if df.empty:
+            st.warning(f"No data available for {title}.")
+            return

+        fig = px.bar(df, x='email_receiver', y='Total Companies/Executives engaged', color='Week', barmode='group',
+                     title=title, labels={'email_receiver': 'Email Receiver', 'Total Companies/Executives engaged': 'Total Companies/Executives Engaged'})
+
+        st.plotly_chart(fig)

+    st.html(f'<h4 class="hero-subtitle">This week, SalesIntel has helped in generating {total_engaged:,} personalized emails so far</h4>')
+    col1, col2 = st.columns(2)

+    with col1:
+        plot_data(cold_df, 'Cold Emails')

+    with col2:
+        plot_data(hook_df, 'Re-engagement Hooks "Hook2Lead"')

+    plot_data(rengagement_df, 'Re-engagement Emails')
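For reference, the week boundaries computed at the top of display_analytics are Monday-to-Sunday windows. A small sketch with a fixed, hypothetical "today" shows the values the datetime arithmetic produces:

import datetime

today = datetime.date(2024, 5, 15)  # a Wednesday, so today.weekday() == 2
start_of_this_week = today - datetime.timedelta(days=today.weekday())  # 2024-05-13 (Monday)
end_of_this_week = start_of_this_week + datetime.timedelta(days=6)     # 2024-05-19 (Sunday)
start_of_last_week = start_of_this_week - datetime.timedelta(days=7)   # 2024-05-06
end_of_last_week = start_of_last_week + datetime.timedelta(days=6)     # 2024-05-12
print(start_of_this_week, end_of_this_week, start_of_last_week, end_of_last_week)

These datetime.date values compare directly against the df['day'] column created in fetch_analytics_data. With the environment variables set (for example via the .env sketch above), the page can be previewed locally with streamlit run clients.py, assuming this file is the Space's entry point.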