from dataclasses import dataclass
from enum import Enum
from typing import Optional, Dict, Any
from composio_llamaindex import ComposioToolSet, App, Action
from datetime import datetime, timedelta
from collections import defaultdict, Counter
from llama_index.llms.openai import OpenAI
import gradio as gr
import os
import json
from dotenv import load_dotenv

# Load environment variables (expects OPENAI_API_KEY and COMPOSIO_API_KEY)
load_dotenv()

llm = OpenAI(model='gpt-4o', api_key=os.getenv('OPENAI_API_KEY'))
class ConnectionStatus(Enum):
    PENDING = "pending"
    ACTIVE = "active"
    FAILED = "failed"
    NOT_FOUND = "not_found"
@dataclass
class APIResponse:
    success: bool
    data: Optional[Dict[str, Any]] = None
    error: Optional[str] = None

    def to_json(self) -> str:
        return json.dumps({
            "success": self.success,
            "data": self.data,
            "error": self.error
        })
class CalendarService:
    def __init__(self):
        self.toolset = ComposioToolSet(api_key=os.getenv('COMPOSIO_API_KEY'))
        self.connections: Dict[str, Dict[str, Any]] = {}
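
    # NOTE: create_gradio_api() below calls service.initiate_connection(), but that
    # method is missing from this listing. The sketch below is one way to fill the gap,
    # assuming the Composio client's Entity.initiate_connection() API and the
    # `redirectUrl` / `connectedAccountId` fields on the returned connection request;
    # adjust to the Composio SDK version you are actually using.
    def initiate_connection(self, entity_id: str, redirect_url: Optional[str] = None) -> APIResponse:
        try:
            entity = self.toolset.client.get_entity(id=entity_id)
            connection_request = entity.initiate_connection(
                app_name=App.GOOGLECALENDAR,
                redirect_url=redirect_url
            )

            # Track the pending connection so check_status() can report on it
            self.connections[entity_id] = {
                'status': ConnectionStatus.PENDING.value,
                'connection_id': connection_request.connectedAccountId
            }

            return APIResponse(
                success=True,
                data={
                    'redirect_url': connection_request.redirectUrl,
                    'message': "Visit the redirect URL to authorize Google Calendar access"
                }
            )
        except Exception as e:
            return APIResponse(
                success=False,
                error=f"Failed to initiate connection: {str(e)}"
            )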
    def analyze_calendar_events(self, response_data):
        """
        Analyze calendar events and return statistics about meetings.
        """
        current_year = datetime.now().year
        meetings = []
        participants = []
        meeting_times = []
        total_duration = timedelta()
        monthly_meetings = defaultdict(int)
        daily_meetings = defaultdict(int)

        # The event list is nested under data -> event_data -> event_data in the response
        events = response_data.get('data', {}).get('event_data', {}).get('event_data', [])

        for event in events:
            start_data = event.get('start', {})
            end_data = event.get('end', {})

            try:
                start = datetime.fromisoformat(start_data.get('dateTime').replace('Z', '+00:00'))
                end = datetime.fromisoformat(end_data.get('dateTime').replace('Z', '+00:00'))

                if start.year == current_year:
                    duration = end - start
                    total_duration += duration
                    monthly_meetings[start.strftime('%B')] += 1
                    daily_meetings[start.strftime('%A')] += 1
                    meeting_times.append(start.strftime('%H:%M'))

                    if 'attendees' in event:
                        for attendee in event['attendees']:
                            if attendee.get('responseStatus') != 'declined':
                                participants.append(attendee.get('email'))

                    organizer_email = event.get('organizer', {}).get('email')
                    if organizer_email:
                        participants.append(organizer_email)

                    meetings.append({
                        'start': start,
                        'duration': duration,
                        'summary': event.get('summary', 'No Title')
                    })
            except (ValueError, TypeError, AttributeError) as e:
                # All-day events have no 'dateTime' key, so they are skipped here
                print(f"Error processing event: {e}")
                continue
        total_meetings = len(meetings)
        stats = {
            "total_meetings_this_year": total_meetings
        }

        if total_meetings > 0:
            stats.update({
                "total_time_spent": str(total_duration),
                "busiest_month": max(monthly_meetings.items(), key=lambda x: x[1])[0] if monthly_meetings else "N/A",
                "busiest_day": max(daily_meetings.items(), key=lambda x: x[1])[0] if daily_meetings else "N/A",
                "most_frequent_participant": Counter(participants).most_common(1)[0][0] if participants else "N/A",
                "average_meeting_duration": str(total_duration / total_meetings),
                "most_common_meeting_time": Counter(meeting_times).most_common(1)[0][0] if meeting_times else "N/A",
                "monthly_breakdown": dict(monthly_meetings),
                "daily_breakdown": dict(daily_meetings)
            })
        else:
            stats.update({
                "total_time_spent": "0:00:00",
                "busiest_month": "N/A",
                "busiest_day": "N/A",
                "most_frequent_participant": "N/A",
                "average_meeting_duration": "0:00:00",
                "most_common_meeting_time": "N/A",
                "monthly_breakdown": {},
                "daily_breakdown": {}
            })

        return stats
    def generate_wrapped(self, entity_id: str) -> APIResponse:
        try:
            # Get current year's start and end dates
            current_year = datetime.now().year
            time_min = f"{current_year},1,1,0,0,0"
            time_max = f"{current_year},12,31,23,59,59"

            request_params = {
                "calendar_id": "primary",
                "timeMin": time_min,
                "timeMax": time_max,
                "single_events": True,
                "max_results": 2500,
                "order_by": "startTime"
            }

            events_response = self.toolset.execute_action(
                action=Action.GOOGLECALENDAR_FIND_EVENT,
                params=request_params,
                entity_id=entity_id
            )

            # Composio's response dict uses the key "successfull" (sic)
            if events_response["successfull"]:
                stats = self.analyze_calendar_events(events_response)

                # Create a prompt for the LLM with the stats
                prompt = f"""Based on the following calendar statistics, analyze which tech billionaire this person's schedule most resembles and provide brief comments for each metric:

Stats:
- Total meetings this year: {stats['total_meetings_this_year']}
- Total time in meetings: {stats['total_time_spent']}
- Busiest month: {stats['busiest_month']}
- Busiest day: {stats['busiest_day']}
- Average meeting duration: {stats['average_meeting_duration']}
- Most common meeting time: {stats['most_common_meeting_time']}
- Most frequent collaborator: {stats['most_frequent_participant']}

Please provide:
1. Which tech billionaire's schedule this most resembles and why
2. A one-sentence comment for each of the above metrics

Format your response as JSON with keys: 'billionaire_match' and 'metric_comments'.
Return only the JSON object, with no commentary before or after it."""

                llm_analysis = llm.complete(prompt)

                try:
                    # llm.complete() returns a CompletionResponse; parse its text as JSON
                    llm_json = json.loads(llm_analysis.text)
                except json.JSONDecodeError:
                    llm_json = {
                        "billionaire_match": "Analysis unavailable",
                        "metric_comments": "Comments unavailable"
                    }

                # Add LLM analysis to stats
                stats.update({
                    "schedule_analysis": llm_json.get("billionaire_match", "Analysis unavailable"),
                    "metric_insights": llm_json.get("metric_comments", "Comments unavailable")
                })

                return APIResponse(
                    success=True,
                    data=stats
                )
            else:
                return APIResponse(
                    success=False,
                    error=events_response.get("error") or "Failed to fetch calendar events"
                )
        except Exception as e:
            return APIResponse(
                success=False,
                error=f"Failed to generate wrapped: {str(e)}"
            )
    def check_status(self, entity_id: str) -> APIResponse:
        try:
            if entity_id not in self.connections:
                return APIResponse(
                    success=False,
                    error="No connection found for this entity ID"
                )

            connection = self.connections[entity_id]
            return APIResponse(
                success=True,
                data={
                    'status': connection['status'],
                    'message': f"Connection status: {connection['status']}"
                }
            )
        except Exception as e:
            return APIResponse(
                success=False,
                error=f"Failed to check status: {str(e)}"
            )
def create_gradio_api():
    service = CalendarService()

    def handle_connection(entity_id: str, redirect_url: Optional[str] = None) -> str:
        response = service.initiate_connection(entity_id, redirect_url)
        return response.to_json()

    def check_status(entity_id: str) -> str:
        response = service.check_status(entity_id)
        return response.to_json()

    def generate_wrapped(entity_id: str) -> str:
        response = service.generate_wrapped(entity_id)
        return response.to_json()

    # Create API endpoints
    connection_api = gr.Interface(
        fn=handle_connection,
        inputs=[
            gr.Textbox(label="Entity ID"),
            gr.Textbox(label="Redirect URL", placeholder="https://yourwebsite.com/connection/success")
        ],
        outputs=gr.JSON(),
        title="Initialize Calendar Connection",
        description="Start a new calendar connection for an entity",
        examples=[["user123", "https://example.com/callback"]]
    )

    status_api = gr.Interface(
        fn=check_status,
        inputs=gr.Textbox(label="Entity ID"),
        outputs=gr.JSON(),
        title="Check Connection Status",
        description="Check the status of an existing connection",
        examples=[["user123"]]
    )

    wrapped_api = gr.Interface(
        fn=generate_wrapped,
        inputs=gr.Textbox(label="Entity ID"),
        outputs=gr.JSON(),
        title="Generate Calendar Wrapped",
        description="Generate a calendar wrapped summary for an entity",
        examples=[["user123"]]
    )

    # Combine all interfaces
    api = gr.TabbedInterface(
        [connection_api, status_api, wrapped_api],
        ["Connect", "Check Status", "Generate Wrapped"],
        title="Calendar Wrapped API",
    )

    return api


if __name__ == "__main__":
    api = create_gradio_api()
    api.launch(server_name="0.0.0.0", server_port=7860)