Thamaraikannan committed
Commit 1f897b0 · verified
1 Parent(s): aef042b

Upload 3 files

Files changed (3)
  1. Dockerfile +26 -0
  2. app.py +76 -0
  3. requirements.txt +0 -0
Dockerfile ADDED
@@ -0,0 +1,26 @@
+ # Use an official Python runtime as a parent image
+ FROM python:3.10-slim
+
+ # Set the working directory inside the container
+ WORKDIR /app
+
+ # Install system dependencies
+ RUN apt-get update && apt-get install -y \
+     gcc \
+     libcurl4-openssl-dev \
+     libssl-dev \
+     && rm -rf /var/lib/apt/lists/*
+
+ # Install Python dependencies
+ COPY requirements.txt .
+
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Copy the rest of the application code
+ COPY . .
+
+ # Expose the port FastAPI runs on
+ EXPOSE 80
+
+ # Run the application with Uvicorn
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "80"]
app.py ADDED
@@ -0,0 +1,76 @@
+ from fastapi import FastAPI, HTTPException
+ from pydantic import BaseModel
+ from typing import List
+ import requests
+ import os
+ import google.generativeai as genai
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+
+ app = FastAPI()
+
+ # Define input schema
+ class QAInput(BaseModel):
+     questions: List[str]
+     answers: List[str]
+
+
+ # Set your API key
+ GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
+
+ genai.configure(api_key=GEMINI_API_KEY)
+
+ def mock_gemini_response(prompt: str) -> str:
+     try:
+         model = genai.GenerativeModel("gemini-2.0-flash")
+         response = model.generate_content(prompt)
+         return response.text.strip()
+     except Exception as e:
+         return f"Error calling Gemini: {str(e)}"
+
+ # Endpoint to recommend course
+ @app.post("/recommend-course")
+ def recommend_course(data: QAInput):
+     # Step 1: Fetch course list from LMS
+     url = "https://lmslearn.frappe.cloud/api/resource/LMS Course"
+     headers = {
+         "Authorization": "token ecef74adb0ffd76:122897a76b48867",
+         "Accept": "application/json"
+     }
+
+     try:
+         response = requests.get(url, headers=headers)
+         response.raise_for_status()
+     except requests.RequestException as e:
+         raise HTTPException(status_code=500, detail=f"Failed to fetch course list: {str(e)}")
+
+     courses = response.json().get("data", [])
+
+     # Step 2: Build prompt for Gemini
+     user_input = "\n".join(f"Q: {q}\nA: {a}" for q, a in zip(data.questions, data.answers))
+     course_list = "\n".join([f"- {course['course_name']}" for course in courses if 'course_name' in course])
+     prompt = f"""
+     You are an intelligent course recommender.
+
+     Based on the following Q&A from a user:
+     {user_input}
+
+     Here is a list of available courses:
+     {course_list}
+
+     Recommend the most suitable course for the user.
+
+     Instructions:
+     - Do not return the user's questions or answers.
+     - Return only the title of the most suitable course.
+     - Do not modify the course titles from the available course list.
+     """
+
+     gemini_response = mock_gemini_response(prompt)
+
+     return {"recommendation": gemini_response}
+
+
+
requirements.txt ADDED
Binary file (1.72 kB).
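
For quick verification, here is a minimal client sketch for the new /recommend-course endpoint once the container built from the Dockerfile above is running. The base URL, port mapping, and the sample questions and answers are assumptions for illustration only; they are not part of this commit.

# Hypothetical client call for the /recommend-course endpoint (not part of this commit).
# Assumes the image built from the Dockerfile above is running and reachable on port 80.
import requests

BASE_URL = "http://localhost:80"  # assumed host/port mapping

payload = {
    "questions": ["What topic do you want to learn?", "How much experience do you have?"],
    "answers": ["Web development", "Beginner"],
}

resp = requests.post(f"{BASE_URL}/recommend-course", json=payload, timeout=60)
resp.raise_for_status()
print(resp.json())  # expected shape: {"recommendation": "<course title>"}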