vilik committed on
Commit ba99ec5 · verified · 1 Parent(s): 86d401d

Upload 3 files

Files changed (3):
  1. README.md +22 -13
  2. app.py +75 -0
  3. requirements.txt +5 -0
README.md CHANGED
@@ -1,13 +1,22 @@
- ---
- title: El Capitan
- emoji: 🚀
- colorFrom: red
- colorTo: green
- sdk: gradio
- sdk_version: 5.25.0
- app_file: app.py
- pinned: false
- license: mit
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # El_Kapitán_100b – HF Space
+
+ ### What this is:
+ This is a lightweight RAG chatbot for cross-country skiing, using Gemini API + FAISS + personalization.
+
+ ### How to use:
+ 1. Upload your `chunks.json`, `faiss.index`, and `profiles.json` into this Space.
+ 2. Set your secret key:
+    - Go to Settings → Secrets → Add:
+    - `GEMINI_API_KEY` = your Gemini API key from Google AI Studio
+ 3. Deploy and test!
+
+ POST requests should be sent to `/ask` with JSON:
+ ```json
+ {
+   "question": "How to train in summer?",
+   "userId": "vilda",
+   "profileName": "vilda"
+ }
+ ```
+
+ Enjoy smart, personalized coaching responses! 🎿
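
For reference, a minimal client call against the `/ask` endpoint described in this README could look like the sketch below. The `SPACE_URL` value is a placeholder for your own Space's public URL, and the `requests` library is assumed to be available on the client side; the payload fields match the JSON example above.

```python
import requests

# Placeholder -- replace with your own Space's public endpoint.
SPACE_URL = "https://<your-username>-<your-space>.hf.space"

payload = {
    "question": "How to train in summer?",
    "userId": "vilda",
    "profileName": "vilda",
}

# POST the question to /ask and print the coaching answer.
resp = requests.post(f"{SPACE_URL}/ask", json=payload, timeout=60)
resp.raise_for_status()
print(resp.json()["answer"])
```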
app.py ADDED
@@ -0,0 +1,75 @@
+
+ import json
+ import os
+ import numpy as np
+ import faiss
+ from sentence_transformers import SentenceTransformer
+ from flask import Flask, request, jsonify
+ from flask_cors import CORS
+ import google.generativeai as genai
+
+ app = Flask(__name__)
+ CORS(app)
+
+ # Load Gemini
+ genai.configure(api_key=os.environ["GEMINI_API_KEY"])
+ gemini_model = genai.GenerativeModel("gemini-1.5-flash")
+
+ # Lazy load
+ model = None
+ chunks = None
+ index = None
+
+ # Load profiles once
+ with open("profiles.json", "r") as f:
+     profiles = json.load(f)
+
+ @app.route("/ask", methods=["POST"])
+ def ask():
+     global model, chunks, index
+
+     data = request.get_json()
+     question = data.get("question")
+     user_id = data.get("userId")
+     profile_name = data.get("profileName", "").lower()
+
+     if not question or not user_id:
+         return jsonify({"answer": "Missing question or userId"}), 400
+
+     # Lazy load embeddings and FAISS
+     if model is None:
+         model = SentenceTransformer("intfloat/e5-small")
+
+     if chunks is None or index is None:
+         with open("chunks.json", "r") as f:
+             chunks = json.load(f)
+         index = faiss.read_index("faiss.index")
+
+     # Embed question
+     q_emb = model.encode([question])
+     D, I = index.search(np.array(q_emb), k=3)
+     context = "\n\n".join([chunks[i] for i in I[0]])
+
+     # Load profile
+     bullet_points = profiles.get(profile_name, [])
+     profile_context = "\n".join(bullet_points)
+
+     # Prompt
+     prompt = f"""
+ You are El_Kapitán_100b, a professional cross-country skiing coach.
+
+ User profile:
+ {profile_context}
+
+ Context:
+ {context}
+
+ Question:
+ {question}
+ """
+
+     try:
+         response = gemini_model.generate_content(prompt)
+         return jsonify({"answer": response.text})
+     except Exception as e:
+         return jsonify({"answer": f"Server error: {str(e)}"})
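
The commit does not include the script that produces `chunks.json` and `faiss.index`, but given how `ask()` reads them (a JSON list of text chunks plus a FAISS index searched with `intfloat/e5-small` embeddings), a build step might look roughly like the sketch below. The sample chunk texts, the `IndexFlatL2` choice, and the example profile entries are all assumptions, not part of this commit.

```python
import json

import faiss
import numpy as np
from sentence_transformers import SentenceTransformer

# Hypothetical source texts -- in practice these would be your skiing
# documents, split into retrievable chunks.
chunks = [
    "Roller skiing is the main summer substitute for on-snow training.",
    "Double-poling intervals build upper-body endurance.",
]

model = SentenceTransformer("intfloat/e5-small")  # same model app.py loads
embeddings = model.encode(chunks)                 # shape: (n_chunks, dim)

# IndexFlatL2 is an assumption; app.py only requires that faiss.read_index()
# returns an index searchable with these embeddings.
index = faiss.IndexFlatL2(embeddings.shape[1])
index.add(np.asarray(embeddings, dtype="float32"))

faiss.write_index(index, "faiss.index")
with open("chunks.json", "w") as f:
    json.dump(chunks, f)

# profiles.json maps a lowercase profile name to bullet points, matching
# profiles.get(profile_name, []) and "\n".join(bullet_points) in app.py.
profiles = {"vilda": ["Trains 8 hours per week", "Prefers classic technique"]}
with open("profiles.json", "w") as f:
    json.dump(profiles, f)
```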
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ flask
+ flask-cors
+ sentence-transformers
+ faiss-cpu
+ google-generativeai
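
Note that `numpy`, which app.py imports, is not pinned here; it is pulled in transitively by `faiss-cpu` and `sentence-transformers`. To smoke-test the Space code outside Hugging Face, a helper like the sketch below could work; it is hypothetical and not part of this commit, and it assumes the three data files sit next to `app.py`, `GEMINI_API_KEY` is exported, and port 7860 (the default port Spaces expose) is used.

```python
# run_local.py -- hypothetical helper, not part of this commit.
# Assumes chunks.json, faiss.index, and profiles.json are in the working
# directory and GEMINI_API_KEY is set in the environment.
from app import app

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)
```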