# app.py

import torch
import pickle
import gradio as gr
from model import NCFModel
import numpy as np

# Load encoders and user_positive_items
with open('user_encoder.pkl', 'rb') as f:
    user_encoder = pickle.load(f)

with open('item_encoder.pkl', 'rb') as f:
    item_encoder = pickle.load(f)

with open('user_positive_items.pkl', 'rb') as f:
    user_positive_items = pickle.load(f)

# Load the trained model
class NCFModelWrapper:
    def __init__(self, model_path, num_users, num_items, embedding_size=50, device='cpu'):
        self.device = torch.device(device)
        self.model = NCFModel(num_users, num_items, embedding_size=embedding_size).to(self.device)
        self.model.load_state_dict(torch.load(model_path, map_location=self.device))
        self.model.eval()

    def predict(self, user, item):
        with torch.no_grad():
            user = torch.tensor([user], dtype=torch.long).to(self.device)
            item = torch.tensor([item], dtype=torch.long).to(self.device)
            output = self.model(user, item)
            score = torch.sigmoid(output).item()
        return score
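
    # Optional batched variant (a sketch, not part of the original wrapper): it scores
    # many candidate items for one user in a single forward pass instead of calling
    # predict() in a Python loop. Assumes NCFModel accepts equal-length 1-D LongTensors,
    # just as predict() does.
    def predict_batch(self, user, items):
        with torch.no_grad():
            user_batch = torch.full((len(items),), int(user), dtype=torch.long, device=self.device)
            item_batch = torch.tensor(items, dtype=torch.long, device=self.device)
            scores = torch.sigmoid(self.model(user_batch, item_batch)).reshape(-1)
        return scores.cpu().tolist()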

# Determine number of users and items from encoders
num_users = len(user_encoder.classes_)
num_items = len(item_encoder.classes_)

# Initialize the model
model = NCFModelWrapper(
    model_path='best_ncf_model.pth',
    num_users=num_users,
    num_items=num_items,
    embedding_size=50,  # Ensure this matches your trained model
    device='cpu'  # Change to 'cuda' if GPU is available and desired
)

def recommend(user_id, num_recommendations=5):
    # The slider value may arrive as a float; cast it so it can be used as a slice bound.
    num_recommendations = int(num_recommendations)
    try:
        user = user_encoder.transform([user_id])[0]
    except ValueError:
        # LabelEncoder raises ValueError for IDs it has never seen.
        return f"User ID '{user_id}' not found."

    # Get items the user has interacted with
    pos_items = user_positive_items.get(user, set())

    # Get all possible items
    all_items = set(range(num_items))

    # Candidate items are those not interacted with
    candidate_items = list(all_items - pos_items)

    # Predict scores for candidate items
    scores = []
    for item in candidate_items:
        score = model.predict(user, item)
        scores.append((item, score))

    # Sort items based on score
    scores.sort(key=lambda x: x[1], reverse=True)

    # Get top N recommendations
    top_items = scores[:num_recommendations]

    recommendations = []
    for item_id, score in top_items:
        original_item_id = item_encoder.inverse_transform([item_id])[0]
        recommendations.append(f"Item ID: {original_item_id} (Score: {score:.4f})")

    return "\n".join(recommendations)

# Define Gradio interface
iface = gr.Interface(
    fn=recommend,
    inputs=[
        gr.Textbox(lines=1, placeholder="Enter User ID", label="User ID"),
        gr.Slider(minimum=1, maximum=20, step=1, value=5, label="Number of Recommendations")
    ],
    outputs="text",
    title="Neural Collaborative Filtering Recommendation System",
    description="Enter a User ID to receive personalized item recommendations.",
    examples=[
        ["user_1", 5],
        ["user_2", 10],
        ["user_3", 7]
    ]
)

if __name__ == "__main__":
    iface.launch()
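
# For reference: NCFModel is imported from model.py, which is not shown in this file.
# Below is a minimal sketch of a typical NCF architecture it might correspond to; this
# is an assumption, and the real definition must match the state dict saved in
# best_ncf_model.pth:
#
#     import torch
#     import torch.nn as nn
#
#     class NCFModel(nn.Module):
#         def __init__(self, num_users, num_items, embedding_size=50):
#             super().__init__()
#             self.user_embedding = nn.Embedding(num_users, embedding_size)
#             self.item_embedding = nn.Embedding(num_items, embedding_size)
#             self.mlp = nn.Sequential(
#                 nn.Linear(2 * embedding_size, 64),
#                 nn.ReLU(),
#                 nn.Linear(64, 1),
#             )
#
#         def forward(self, user, item):
#             x = torch.cat([self.user_embedding(user), self.item_embedding(item)], dim=-1)
#             return self.mlp(x).squeeze(-1)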