# FastAPI application and supporting packages
from fastapi import FastAPI, File, HTTPException, status
from fastapi.responses import JSONResponse
from pydantic import BaseModel
import json
from typing import List, Dict, Any
import pandas as pd
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from scipy import sparse
from datetime import datetime
import warnings
import os
import logging
import requests
import io
from sklearn.preprocessing import StandardScaler
from collections import defaultdict

warnings.filterwarnings('ignore')

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI()

# URL of the Excel file
EXCEL_URL = "https://huggingface.co/spaces/Vaibhav84/RecommendationAPI/resolve/main/DataSetSample.xlsx"

try:
    # Download the file from URL
    logger.info(f"Attempting to download Excel file from: {EXCEL_URL}")
    response = requests.get(EXCEL_URL)
    response.raise_for_status()  # Raises an HTTPError if the status is 4xx, 5xx
    
    # Read the Excel file from the downloaded content
    excel_content = io.BytesIO(response.content)
    purchase_history = pd.read_excel(excel_content, sheet_name='Transaction History', 
                                   parse_dates=['Purchase_Date'])
    
    # Read Customer Profile sheet
    excel_content.seek(0)  # Reset buffer position
    customer_profiles = pd.read_excel(excel_content, sheet_name='Customer Profile (Individual)')

    # Read Social Media Sentiment
    excel_content.seek(0)  # Reset buffer position
    customer_Media = pd.read_excel(excel_content, sheet_name='Social Media Sentiment', parse_dates=['Timestamp'])

    logger.info("Successfully downloaded and loaded Excel file")
    
    # Process the transaction history data
    purchase_history['Customer_Id'] = purchase_history['Customer_Id'].astype(str)
    product_categories = purchase_history[['Product_Id', 'Category']].drop_duplicates().set_index('Product_Id')['Category'].to_dict()
    purchase_counts = purchase_history.groupby(['Customer_Id', 'Product_Id']).size().unstack(fill_value=0)
    sparse_purchase_counts = sparse.csr_matrix(purchase_counts)
    cosine_similarities = cosine_similarity(sparse_purchase_counts.T)

    # Process customer profile data
    customer_profiles['Customer_Id'] = customer_profiles['Customer_Id'].astype(str)
    
    # Normalize numerical features if they exist
    numerical_features = ['Age', 'Income per year (in dollars)']  # Add or modify based on your actual columns
    scaler = StandardScaler()
    customer_profiles[numerical_features] = scaler.fit_transform(customer_profiles[numerical_features])

    # Process the social media sentiment data
    customer_Media['Customer_Id'] = customer_Media['Customer_Id'].astype(str)
    tweet_categories = customer_Media[['Post_Id', 'Platform']].drop_duplicates().set_index('Post_Id')['Platform'].to_dict()
    tweet_counts = customer_Media.groupby(['Customer_Id', 'Post_Id']).size().unstack(fill_value=0)
    sparse_tweet_counts = sparse.csr_matrix(tweet_counts)
    cosine_similarities_tweet = cosine_similarity(sparse_tweet_counts.T)
    
    logger.info("Data processing completed successfully")
    
except Exception as e:
    logger.error(f"Error downloading or processing data: {str(e)}")
    raise
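
# Key in-memory structures available after a successful load (a summary of the code above):
#   purchase_counts      - customers x products matrix of purchase counts
#   cosine_similarities  - products x products item-item cosine similarity matrix
#   customer_profiles    - profile sheet with Age and Income standardized via StandardScaler
#   tweet_counts         - customers x posts matrix built from the Social Media Sentiment sheet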

def get_customer_items_and_recommendations(user_id: str, n: int = 5) -> tuple[List[Dict], List[Dict]]:
    """
    Get both purchased items and recommendations for a user
    """
    user_id = str(user_id)
    
    if user_id not in purchase_counts.index:
        return [], []
    
    purchased_items = list(purchase_counts.columns[purchase_counts.loc[user_id] > 0])
    
    purchased_items_info = []
    user_purchases = purchase_history[purchase_history['Customer_Id'] == user_id]
    
    for item in purchased_items:
        item_purchases = user_purchases[user_purchases['Product_Id'] == item]
        total_amount = float(item_purchases['Amount (In Dollars)'].sum())
        last_purchase = pd.to_datetime(item_purchases['Purchase_Date'].max())
        category = product_categories.get(item, 'Unknown')
        purchased_items_info.append({
            'product_id': item,
            'category': category,
            'total_amount': total_amount,
            'last_purchase': last_purchase.strftime('%Y-%m-%d')
        })
    
    user_idx = purchase_counts.index.get_loc(user_id)
    user_history = sparse_purchase_counts[user_idx].toarray().flatten()
    similarities = cosine_similarities.dot(user_history)
    purchased_indices = np.where(user_history > 0)[0]
    similarities[purchased_indices] = 0
    recommended_indices = np.argsort(similarities)[::-1][:n]
    recommended_items = list(purchase_counts.columns[recommended_indices])
    recommended_items = [item for item in recommended_items if item not in purchased_items]
    
    recommended_items_info = [
        {
            'product_id': item,
            'category': product_categories.get(item, 'Unknown')
        }
        for item in recommended_items
    ]

    return purchased_items_info, recommended_items_info
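
# Illustrative call of the helper above (the customer ID is hypothetical; real IDs come from
# the 'Customer_Id' column of the Transaction History sheet):
#   purchased, recommended = get_customer_items_and_recommendations("1001", n=3)
# 'purchased' summarizes past products (category, total spend, last purchase date) and
# 'recommended' holds up to n unseen products ranked by item-item cosine similarity.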

@app.get("/")
async def root():
    return {
        "message": "Welcome to the Recommendation API",
        "status": "running",
        "data_loaded": purchase_history is not None
    }

@app.get("/recommendations/{customer_id}")
async def get_recommendations(customer_id: str, n: int = 5):
    """
    Get recommendations for a customer
    
    Parameters:
    - customer_id: The ID of the customer
    - n: Number of recommendations to return (default: 5)
    
    Returns:
    - JSON object containing purchase history and recommendations
    """
    try:
        purchased_items, recommended_items = get_customer_items_and_recommendations(customer_id, n)
        
        return {
            "customer_id": customer_id,
            "purchase_history": purchased_items,
            "recommendations": recommended_items
        }
    except Exception as e:
        logger.error(f"Error processing request for customer {customer_id}: {str(e)}")
        raise HTTPException(status_code=404, detail=f"Error processing customer ID: {customer_id}. {str(e)}")
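
# Example request (a sketch assuming the API is served locally on port 8000 and that the
# customer ID exists in the data; adjust host, port, and ID as needed):
#   curl "http://localhost:8000/recommendations/1001?n=5"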

@app.get("/health")
async def health_check():
    """
    Health check endpoint that returns system information
    """
    return {
        "status": "healthy",
        "data_loaded": purchase_history is not None,
        "number_of_customers": len(purchase_counts.index) if purchase_history is not None else 0,
        "number_of_products": len(purchase_counts.columns) if purchase_history is not None else 0
    }

@app.post("/login")
async def login(customer_id: str, password: str):
    """
    Login endpoint to validate customer ID and password
    
    Parameters:
    - customer_id: The ID of the customer to validate
    - password: Password (first three chars of customer_id + "123")
    
    Returns:
    - JSON object containing login status and message
    """
    try:
        # Convert customer_id to string to match the format in purchase_history
        customer_id = str(customer_id)
        
        # Generate expected password (first three chars + "123")
        expected_password = f"{customer_id[:3]}123"
        
        # Check if customer exists and password matches
        if customer_id in purchase_history['Customer_Id'].unique():
            if password == expected_password:
                # Get customer's basic information
                customer_data = purchase_history[purchase_history['Customer_Id'] == customer_id]
                total_purchases = len(customer_data)
                total_spent = customer_data['Amount (In Dollars)'].sum()
                
                # Convert last purchase date to datetime if it's not already
                last_purchase = pd.to_datetime(customer_data['Purchase_Date'].max())
                last_purchase_str = last_purchase.strftime('%Y-%m-%d')
                
                return JSONResponse(
                    status_code=status.HTTP_200_OK,
                    content={
                        "status": "success",
                        "message": "Login successful",
                        "customer_id": customer_id,
                        "customer_stats": {
                            "total_purchases": total_purchases,
                            "total_spent": float(total_spent),
                            "last_purchase_date": last_purchase_str
                        }
                    }
                )
            else:
                return JSONResponse(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    content={
                        "status": "error",
                        "message": "Invalid password"
                    }
                )
        else:
            return JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={
                    "status": "error",
                    "message": "Invalid customer ID"
                }
            )
            
    except Exception as e:
        logger.error(f"Error during login for customer {customer_id}: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error during login process: {str(e)}"
        )
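
# Example login request (a sketch; customer_id and password are sent as query parameters
# because the endpoint declares them as plain scalars without a request body, and the
# sample ID below is hypothetical - its password would be the first three characters + "123"):
#   curl -X POST "http://localhost:8000/login?customer_id=1001&password=100123"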

# Content recommendation helper based on customer profile attributes
def get_content_recommendations(customer_id: str, n: int = 5) -> List[Dict]:
    """
    Get content recommendations based on customer profile
    """
    try:
        # Get customer profile
        customer_profile = customer_profiles[customer_profiles['Customer_Id'] == customer_id].iloc[0]
        
        # Define content rules based on customer attributes
        content_suggestions = []
        
        # Age-based recommendations
        age = customer_profile['Age'] * scaler.scale_[0] + scaler.mean_[0]  # Denormalize age
        
        if age < 25:
            content_suggestions.extend([
                {"type": "Video", "title": "Getting Started with Personal Finance", "category": "Financial Education"},
                {"type": "Article", "title": "Budgeting Basics for Young Adults", "category": "Financial Planning"},
                {"type": "Interactive", "title": "Investment 101 Quiz", "category": "Education"}
            ])
        elif age < 40:
            content_suggestions.extend([
                {"type": "Video", "title": "Investment Strategies for Growing Wealth", "category": "Investment"},
                {"type": "Article", "title": "Family Financial Planning Guide", "category": "Financial Planning"},
                {"type": "Webinar", "title": "Real Estate Investment Basics", "category": "Investment"}
            ])
        else:
            content_suggestions.extend([
                {"type": "Video", "title": "Retirement Planning Strategies", "category": "Retirement"},
                {"type": "Article", "title": "Estate Planning Essentials", "category": "Financial Planning"},
                {"type": "Webinar", "title": "Tax Optimization for Retirement", "category": "Tax Planning"}
            ])
        
        # Income-based recommendations
        income = customer_profile['Income per year (in dollars)'] * scaler.scale_[1] + scaler.mean_[1]  # Denormalize income
        
        if income < 50000:
            content_suggestions.extend([
                {"type": "Video", "title": "Debt Management Strategies", "category": "Debt Management"},
                {"type": "Article", "title": "Saving on a Tight Budget", "category": "Budgeting"}
            ])
        elif income < 100000:
            content_suggestions.extend([
                {"type": "Video", "title": "Tax-Efficient Investment Strategies", "category": "Investment"},
                {"type": "Article", "title": "Maximizing Your 401(k)", "category": "Retirement"}
            ])
        else:
            content_suggestions.extend([
                {"type": "Video", "title": "Advanced Tax Planning Strategies", "category": "Tax Planning"},
                {"type": "Article", "title": "High-Net-Worth Investment Guide", "category": "Investment"}
            ])
        
        # Add personalization based on purchase history
        if customer_id in purchase_history['Customer_Id'].unique():
            customer_purchases = purchase_history[purchase_history['Customer_Id'] == customer_id]
            categories = customer_purchases['Category'].unique()
            
            for category in categories:
                if category == 'Investment':
                    content_suggestions.append({
                        "type": "Video",
                        "title": f"Advanced {category} Strategies",
                        "category": category
                    })
                elif category == 'Insurance':
                    content_suggestions.append({
                        "type": "Article",
                        "title": f"Understanding Your {category} Options",
                        "category": category
                    })
        
        # Remove duplicates and limit to n recommendations
        seen = set()
        unique_suggestions = []
        for suggestion in content_suggestions:
            key = (suggestion['title'], suggestion['type'])
            if key not in seen:
                seen.add(key)
                unique_suggestions.append(suggestion)
        
        return unique_suggestions[:n]
        
    except Exception as e:
        logger.error(f"Error generating content recommendations: {str(e)}")
        return []
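
# Illustrative call (hypothetical customer ID; the function returns [] when the profile
# lookup fails or an error occurs):
#   suggestions = get_content_recommendations("1001", n=5)
#   # -> e.g. [{'type': 'Video', 'title': 'Investment Strategies for Growing Wealth',
#   #           'category': 'Investment'}, ...]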

# Endpoint for personalized content recommendations
@app.get("/content-recommendations/{customer_id}")
async def get_customer_content_recommendations(customer_id: str, n: int = 5):
    """
    Get personalized content recommendations for a customer
    
    Parameters:
    - customer_id: The ID of the customer
    - n: Number of recommendations to return (default: 5)
    
    Returns:
    - JSON object containing personalized content recommendations
    """
    try:
        # Validate customer
        if customer_id not in customer_profiles['Customer_Id'].unique():
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Customer ID not found"
            )
        
        # Get customer profile summary (denormalize Age and Income first, since these
        # columns were standardized with StandardScaler when the data was loaded)
        customer_profile = customer_profiles[customer_profiles['Customer_Id'] == customer_id].iloc[0]
        age = customer_profile['Age'] * scaler.scale_[0] + scaler.mean_[0]
        income = customer_profile['Income per year (in dollars)'] * scaler.scale_[1] + scaler.mean_[1]
        profile_summary = {
            "age_group": "Young" if age < 25 else "Middle" if age < 40 else "Senior",
            "income_level": "Low" if income < 50000 else "Medium" if income < 100000 else "High"
        }
        
        # Get content recommendations
        recommendations = get_content_recommendations(customer_id, n)
        
        return {
            "customer_id": customer_id,
            "profile_summary": profile_summary,
            "recommendations": recommendations
        }
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error processing content recommendations for customer {customer_id}: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error processing request: {str(e)}"
        )
    
@app.get("/social-sentiment/{customer_id}")
async def get_social_sentiment(customer_id: str):
    """
    Get social media sentiment analysis for a customer
    
    Parameters:
    - customer_id: The ID of the customer
    
    Returns:
    - JSON object containing sentiment analysis and insights
    """
    try:
        # Validate customer
        if customer_id not in customer_Media['Customer_Id'].unique():
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="No social media data found for this customer"
            )
        
        # Get the customer's social media data (copy the slice so later assignments
        # don't trigger pandas' SettingWithCopyWarning)
        customer_posts = customer_Media[customer_Media['Customer_Id'] == customer_id].copy()
        customer_posts['Timestamp'] = pd.to_datetime(customer_posts['Timestamp'])

        # Calculate sentiment metrics
        avg_sentiment = customer_posts['Sentiment_Score'].mean()
        recent_sentiment = customer_posts.sort_values('Timestamp', ascending=False)['Sentiment_Score'].iloc[0]

        # Posts in chronological order, used for the trend insight below
        sentiment_trend = customer_posts.sort_values('Timestamp')
        
        # Platform breakdown
        platform_stats = customer_posts.groupby('Platform').agg({
            'Post_Id': 'count',
            'Sentiment_Score': 'mean'
        }).round(2)
        
        platform_breakdown = [
            {
                "platform": platform,
                "post_count": int(stats['Post_Id']),
                "avg_sentiment": float(stats['Sentiment_Score'])
            }
            for platform, stats in platform_stats.iterrows()
        ]
        
        # Intent analysis
        intent_distribution = customer_posts['Intent'].value_counts().to_dict()
        
        # Get most recent posts with sentiments
        recent_posts = customer_posts.sort_values('Timestamp', ascending=False).head(5)
        recent_activities = [
            {
                "timestamp": post['Timestamp'].strftime('%Y-%m-%d %H:%M:%S'),
                "platform": post['Platform'],
                "content": post['Content'],
                "sentiment_score": float(post['Sentiment_Score']),
                "intent": post['Intent']
            }
            for _, post in recent_posts.iterrows()
        ]
        
        # Calculate sentiment categories
        sentiment_categories = {
            "positive": len(customer_posts[customer_posts['Sentiment_Score'] > 0.5]),
            "neutral": len(customer_posts[(customer_posts['Sentiment_Score'] >= -0.5) & 
                                       (customer_posts['Sentiment_Score'] <= 0.5)]),
            "negative": len(customer_posts[customer_posts['Sentiment_Score'] < -0.5])
        }
        
        # Determine overall mood
        if avg_sentiment > 0.5:
            overall_mood = "Positive"
        elif avg_sentiment < -0.5:
            overall_mood = "Negative"
        else:
            overall_mood = "Neutral"
            
        # Generate insights
        insights = []
        
        # Trend insight: compare the most recent sentiment with the earliest recorded post
        sentiment_change = recent_sentiment - sentiment_trend['Sentiment_Score'].iloc[0]
        if abs(sentiment_change) > 0.3:
            trend_direction = "improved" if sentiment_change > 0 else "declined"
            insights.append(f"Customer sentiment has {trend_direction} over time")
            
        # Platform insight
        if len(platform_stats) > 1:
            best_platform = platform_stats['Sentiment_Score'].idxmax()
            insights.append(f"Customer shows most positive engagement on {best_platform}")
            
        # Engagement insight
        if len(recent_activities) > 0:
            recent_avg = sum(post['sentiment_score'] for post in recent_activities) / len(recent_activities)
            if abs(recent_avg - avg_sentiment) > 0.3:
                trend = "improving" if recent_avg > avg_sentiment else "declining"
                insights.append(f"Recent sentiment is {trend} compared to overall average")
        
        return {
            "customer_id": customer_id,
            "overall_sentiment": {
                "average_score": float(avg_sentiment),
                "recent_score": float(recent_sentiment),
                "overall_mood": overall_mood
            },
            "sentiment_distribution": sentiment_categories,
            "platform_analysis": platform_breakdown,
            "intent_analysis": intent_distribution,
            "recent_activities": recent_activities,
            "insights": insights,
            "analysis_timestamp": datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        }
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error processing social sentiment for customer {customer_id}: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error processing request: {str(e)}"
        )

# Combined endpoint for full customer analysis
@app.get("/customer-analysis/{customer_id}")
async def get_customer_analysis(customer_id: str):
    """
    Get comprehensive customer analysis including recommendations and sentiment
    
    Parameters:
    - customer_id: The ID of the customer
    
    Returns:
    - JSON object containing full customer analysis
    """
    try:
        # Get content recommendations
        content_recs = await get_customer_content_recommendations(customer_id)
        
        # Get social sentiment
        sentiment_analysis = await get_social_sentiment(customer_id)
        
        # Get purchase recommendations
        purchase_recs = await get_recommendations(customer_id)
        
        return {
            "customer_id": customer_id,
            "sentiment_analysis": sentiment_analysis,
            "content_recommendations": content_recs,
            "purchase_recommendations": purchase_recs,
            "analysis_timestamp": datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        }
        
    except Exception as e:
        logger.error(f"Error processing customer analysis for {customer_id}: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error processing request: {str(e)}"
        )
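
# Local development entry point (a minimal sketch; a Hugging Face Space typically launches
# the app with its own uvicorn command and port, so this block is only a convenience for
# running the file directly):
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)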