samu commited on
Commit
c151c44
·
1 Parent(s): b7cd334

v3 backend

Browse files
Dockerfile CHANGED
@@ -1,4 +1,4 @@
1
- FROM python:3.9
2
 
3
  COPY ./ ./
4
 
@@ -6,5 +6,5 @@ COPY ./ ./
6
  RUN pip install --no-cache --upgrade -r /requirements.txt
7
 
8
  # Start the application
9
- CMD ["uvicorn", "backend.api:app", "--host", "0.0.0.0", "--port", "7860"]
10
 
 
1
+ FROM python:3.10
2
 
3
  COPY ./ ./
4
 
 
6
  RUN pip install --no-cache --upgrade -r /requirements.txt
7
 
8
  # Start the application
9
+ CMD ["uvicorn", "backend.main:app", "--host", "0.0.0.0", "--port", "7860"]
10
 
backend/__pycache__/api.cpython-312.pyc DELETED
Binary file (2.02 kB)
 
backend/__pycache__/api.cpython-39.pyc DELETED
Binary file (1.56 kB)
 
backend/__pycache__/config.cpython-310.pyc ADDED
Binary file (3.68 kB). View file
 
backend/__pycache__/config.cpython-312.pyc CHANGED
Binary files a/backend/__pycache__/config.cpython-312.pyc and b/backend/__pycache__/config.cpython-312.pyc differ
 
backend/__pycache__/config.cpython-39.pyc DELETED
Binary file (3.16 kB)
 
backend/__pycache__/database.cpython-310.pyc ADDED
Binary file (10.1 kB). View file
 
backend/__pycache__/database.cpython-312.pyc ADDED
Binary file (12.6 kB). View file
 
backend/__pycache__/main.cpython-310.pyc ADDED
Binary file (2.1 kB). View file
 
backend/__pycache__/main.cpython-312.pyc CHANGED
Binary files a/backend/__pycache__/main.cpython-312.pyc and b/backend/__pycache__/main.cpython-312.pyc differ
 
backend/__pycache__/utils.cpython-312.pyc DELETED
Binary file (1.24 kB)
 
backend/__pycache__/utils.cpython-39.pyc DELETED
Binary file (748 Bytes)
 
backend/api.py DELETED
@@ -1,39 +0,0 @@
1
- from fastapi import FastAPI, HTTPException
2
- from fastapi.responses import JSONResponse, StreamingResponse
3
- from fastapi.middleware.cors import CORSMiddleware
4
- from fastapi import Request
5
- import uvicorn
6
- from pydantic import BaseModel, Field
7
- from typing import Union, List, Dict
8
- import json
9
- import asyncio
10
- from backend.utils import get_completion
11
- from backend.config import CURRICULUM_INSTRUCTIONS
12
-
13
- app = FastAPI()
14
-
15
- app.add_middleware(
16
- CORSMiddleware,
17
- allow_origins=["*"],
18
- allow_credentials=True,
19
- allow_methods=["*"],
20
- allow_headers=["*"],
21
- )
22
-
23
- # Define a request model
24
- class QueryRequest(BaseModel):
25
- user_query: str
26
-
27
- # Define a response model
28
- class QueryResponse(BaseModel):
29
- answer: str
30
-
31
- @app.get("/")
32
- async def root():
33
- return {"message": "Hello World"}
34
-
35
- @app.post("/curriculum")
36
- async def get_curriculum(request: QueryRequest):
37
- query = request.user_query
38
- response = await get_completion(prompt=query, instruction=CURRICULUM_INSTRUCTIONS)
39
- return JSONResponse(response, media_type="application/json")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
backend/config.py CHANGED
@@ -1,59 +1,71 @@
1
- CURRICULUM_INSTRUCTIONS = """
2
- You are an expert AI language learning curriculum designer.
3
- Your task is to create an intensive language learning curriculum tailored to the user's specific learning objectives and preferences.
4
- By default, design a one-month curriculum (divided into four weeks), but note that the duration can be adjusted if the user desires a different length of study time.
5
-
6
- **Curriculum Design Principles:**
7
-
8
- 1. **AI-Driven:** Leverage AI to create personalized learning experiences.
9
- 2. **Intensive:** Design the curriculum for significant weekly study time.
10
- 3. **Structured:** Divide the curriculum into four weeks (or the user-specified duration), with each week building upon the previous one.
11
- 4. **Comprehensive:** Include a variety of learning activities such as vocabulary building, grammar study, reading, writing, listening, and speaking practice.
12
- 5. **Personalized:** Adapt the curriculum to the user's learning goals, current level, interests, and native language.
13
- 6. **Measurable:** Suggest ways for the user to track their progress.
14
- 7. **Output Format:** Provide the curriculum in a valid JSON format.
15
- 8. **Weekly Content:** Focus on providing a theme and a set of activities for each week instead of daily content. For each week, include the approximate time the user should invest during that week (for example, "estimated_duration": "20 hours per week"). Adjust the duration if the user requests a different total timeframe.
16
-
17
- **Important Notes:**
18
- - The curriculum should be **bilingual** in the user's **native language** and the **target language**.
19
- - Provide detailed instructions, explanations, and examples in the user's **native language** (for context and easier understanding).
20
- - Activities and exercises should also include explanations in the user's **native language** where necessary, ensuring the learning experience is smooth and intuitive.
21
-
22
- **Output JSON Format:**
23
- ```json
24
- {
25
- "language": "target_language",
26
- "native_language": "user_native_language",
27
- "learning_goal": "user_provided_goal",
28
- "current_level": "user_provided_level",
29
- "weeks": [
30
- {
31
- "week": 1,
32
- "theme": "week_theme",
33
- "estimated_duration": "estimated_weekly_time",
34
- "activities": [
35
- {
36
- "type": "activity_type",
37
- "description": "activity_description_in_native_language"
38
- },
39
- {
40
- "type": "activity_type",
41
- "description": "activity_description_in_native_language"
42
- }, ...
43
- ]
44
- },
45
- {
46
- "week": 2,
47
- "theme": "week_theme",
48
- "estimated_duration": "estimated_weekly_time",
49
- "activities": [
50
- {
51
- "type": "activity_type",
52
- "description": "activity_description_in_native_language"
53
- }, ...
54
- ]
55
- }, ... // repeat for the duration of the curriculum
56
- ]
 
 
 
 
 
 
 
 
 
 
 
57
  }
58
- ```
 
59
  """
 
1
# ---------------------------------------------------------------------------
# Prompt templates ("instructions") for the three generation modes.
# Each template is passed verbatim as the system message and constrains the
# model to emit ONLY valid JSON in the shape the corresponding endpoint
# (/generate/flashcards, /generate/exercises, /generate/simulation) expects.
# NOTE(review): these templates hard-code Chinese as the target language and
# English as the explanation language — confirm this is intended before
# adding other language pairs.
# ---------------------------------------------------------------------------

# System prompt for /generate/flashcards: 5 vocabulary flashcards as a JSON array.
flashcard_mode_instructions = """
You are a vocabulary tutor specialized in teaching Chinese to non-native learners. Your goal is to help users quickly acquire context-specific vocabulary.

Given a user query that describes a scenario or topic, generate exactly 5 flashcards in valid JSON array format. Each flashcard must include:
- 'word': a Chinese word or phrase relevant to the query
- 'definition': a simple, learner-friendly explanation in English
- 'example': a short example sentence in Chinese that naturally uses the word

Ensure that all words are commonly used and appropriate for the given context. Favor words that would be useful in real-life communication. Do not include rare, archaic, or highly academic words.

Example input: 'Ordering food at a restaurant'

Example output:
[
  {"word": "菜单", "definition": "menu", "example": "请给我看一下菜单。"},
  … (4 more flashcards)
]

Output ONLY valid JSON. Do not include explanations, preambles, or notes.
"""

# System prompt for /generate/exercises: 5 cloze exercises as a JSON array.
exercise_mode_instructions = """
You are an exercise generator focused on reinforcing Chinese vocabulary and grammar through cloze (fill-in-the-blank) activities. Your job is to create engaging exercises that match a specific scenario or vocabulary need.

Given a user query that describes a goal or context, generate exactly 5 cloze-style exercises in valid JSON array format. Each item must contain:
- 'sentence': a Chinese sentence with one blank represented as '___'
- 'answer': the correct word or phrase to complete the sentence
- 'choices': an array of 3 plausible distractor options (one of which is the correct answer)

All vocabulary must be suitable for beginner to intermediate learners. Choose distractors that are reasonable but clearly incorrect in the given context to aid in learning.

Example input: 'Talking about daily routines'

Example output:
[
  {"sentence": "我每天早上___七点起床。", "answer": "大约", "choices": ["大约", "也许", "还是"]},
  (4 more)
]

Output ONLY valid JSON. Do not include extra text or formatting.
"""

# System prompt for /generate/simulation: a roleplay dialogue as a JSON object.
simulation_mode_instructions = """
You are a Chinese conversation simulator designed to help learners practice real-world communication scenarios. Your job is to create short interactive dialogues that mimic actual conversations.

Given a user query describing a specific situation, produce a JSON object with the following structure:
- 'scenario': a short description of the roleplay setting
- 'dialog': an array of at least 6 message turns (3 per speaker), alternating between 'tutor' and 'user' roles

Each message must include:
- 'role': either 'tutor' or 'user'
- 'chinese': the Chinese text
- 'pinyin': the phonetic transcription
- 'english': a simple English translation

Ensure that the tutor initiates the conversation. The dialogue should reflect real-life tone, pacing, and common expressions. Make the tutor encouraging and use slow, clear Mandarin. Use language that matches beginner to intermediate learners.

Example input: 'Buying a ticket at the train station'

Example output:
{
  "scenario": "Buying a ticket at the train station",
  "dialog": [
    {"role": "tutor", "chinese": "你好,请问你要去哪里?", "pinyin": "Nǐ hǎo, qǐngwèn nǐ yào qù nǎlǐ?", "english": "Hello, may I ask where you're going?"},
    {"role": "user", "chinese": "我要去上海。", "pinyin": "Wǒ yào qù Shànghǎi.", "english": "I want to go to Shanghai."},
    … (at least 6 turns total)
  ]
}

Output ONLY valid JSON. Do not include introductory text or any extra formatting.
"""
backend/database.py ADDED
@@ -0,0 +1,293 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import psycopg2
import os
from psycopg2 import sql
from dotenv import load_dotenv

# Pull DB settings from a local .env file, if present.
load_dotenv()

# Database configuration read from environment variables, with development
# fallbacks when a variable is unset.
DB_NAME = os.getenv("POSTGRES_DB", "linguaai")
DB_USER = os.getenv("POSTGRES_USER", "linguaai_user")
# SECURITY NOTE(review): a real-looking credential is committed here as the
# fallback default. Rotate this password and supply it exclusively via the
# POSTGRES_PASSWORD environment variable.
DB_PASSWORD = os.getenv("POSTGRES_PASSWORD", "LinguaAI1008")
DB_HOST = os.getenv("DB_HOST", "localhost")
DB_PORT = os.getenv("DB_PORT", "5432")
14
+
15
# SQL schema definition, executed as a single script by reset_database()/
# setup_database(): drops all known objects first, then recreates the tables,
# the shared updated_at trigger function, and one trigger per table.
# FIX: `created_at` on flashcard_sets, exercise_sets and simulations was a
# naive TIMESTAMP while every other timestamp column (including each table's
# own `updated_at`) is TIMESTAMPTZ — unified to TIMESTAMPTZ for consistency.
SCHEMA_SQL = """
-- Drop existing objects if they exist.
-- Some drops target tables (curriculum-related) not defined in this script;
-- they clean up leftovers from an earlier schema version.
DROP TABLE IF EXISTS user_activity_progress CASCADE;
DROP TABLE IF EXISTS activities CASCADE;
DROP TABLE IF EXISTS weekly_modules CASCADE;
DROP TABLE IF EXISTS curriculums CASCADE;
DROP TABLE IF EXISTS generated_flashcards CASCADE;
DROP TABLE IF EXISTS flashcard_sets CASCADE;
DROP TABLE IF EXISTS generated_exercises CASCADE;
DROP TABLE IF EXISTS exercise_sets CASCADE;
DROP TABLE IF EXISTS simulations CASCADE;
DROP TABLE IF EXISTS users CASCADE;
DROP TYPE IF EXISTS activity_status CASCADE;

-- Table `users`
CREATE TABLE users (
    user_id SERIAL PRIMARY KEY,
    username VARCHAR(50) UNIQUE NOT NULL,
    email VARCHAR(100) UNIQUE NOT NULL,
    password_hash VARCHAR(255) NOT NULL,
    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);

-- Shared trigger function: refreshes updated_at on every UPDATE.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = now();
    RETURN NEW;
END;
$$ language 'plpgsql';

CREATE TRIGGER users_update_updated_at
BEFORE UPDATE ON users
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();


-- ============================================
-- Tables for Generated Content (Flashcards)
-- ============================================

-- Table `flashcard_sets`: one row per generation request/query.
CREATE TABLE flashcard_sets (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(user_id),
    query TEXT NOT NULL,
    flashcards JSONB NOT NULL, -- stores the array of 5 flashcards
    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_flashcard_set_user ON flashcard_sets(user_id);

CREATE TRIGGER flashcard_sets_update_updated_at
BEFORE UPDATE ON flashcard_sets
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Table `generated_flashcards`: individual flashcards within a set.
CREATE TABLE generated_flashcards (
    flashcard_id SERIAL PRIMARY KEY,
    set_id INT NOT NULL REFERENCES flashcard_sets(id) ON DELETE CASCADE,
    word TEXT NOT NULL,
    definition TEXT NOT NULL,
    example TEXT, -- example sentence may be absent
    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_flashcard_set ON generated_flashcards(set_id);

CREATE TRIGGER generated_flashcards_update_updated_at
BEFORE UPDATE ON generated_flashcards
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();


-- ============================================
-- Tables for Generated Content (Exercises)
-- ============================================

-- Table `exercise_sets`: one row per generation request/query.
CREATE TABLE exercise_sets (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(user_id),
    query TEXT NOT NULL,
    exercises JSONB NOT NULL, -- stores the array of 5 exercises
    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_exercise_set_user ON exercise_sets(user_id);

CREATE TRIGGER exercise_sets_update_updated_at
BEFORE UPDATE ON exercise_sets
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Table `generated_exercises`: individual exercises within a set.
CREATE TABLE generated_exercises (
    exercise_id SERIAL PRIMARY KEY,
    set_id INT NOT NULL REFERENCES exercise_sets(id) ON DELETE CASCADE,
    sentence TEXT NOT NULL,
    answer TEXT NOT NULL,
    choices JSONB NOT NULL, -- the array of answer choices
    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_exercise_set ON generated_exercises(set_id);

CREATE TRIGGER generated_exercises_update_updated_at
BEFORE UPDATE ON generated_exercises
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();


-- ============================================
-- Table for Generated Content (Simulations)
-- ============================================

-- Table `simulations`: one row per simulation request/result.
CREATE TABLE simulations (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(user_id),
    query TEXT NOT NULL,
    scenario TEXT NOT NULL,
    dialog JSONB NOT NULL, -- array of turns with 'role', 'chinese', 'pinyin', 'english'
    created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_simulation_user ON simulations(user_id);

CREATE TRIGGER simulations_update_updated_at
BEFORE UPDATE ON simulations
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
"""
164
+
165
def get_db_connection():
    """Open and return a new synchronous psycopg2 connection.

    Connection parameters come from the module-level DB_* settings.
    On failure the psycopg2 error is printed and re-raised to the caller.
    """
    try:
        return psycopg2.connect(
            dbname=DB_NAME,
            user=DB_USER,
            password=DB_PASSWORD,
            host=DB_HOST,
            port=DB_PORT,
        )
    except psycopg2.Error as e:
        print(f"Database connection error: {e}")
        raise
179
+
180
def reset_sequences():
    """Return SQL that *generates* sequence-reset statements.

    The returned query selects one 'ALTER SEQUENCE ... RESTART WITH 1;'
    string per sequence in the public schema; the caller executes each
    generated statement to reset the auto-incrementing IDs.
    """
    return """
    SELECT 'ALTER SEQUENCE ' || sequence_name || ' RESTART WITH 1;'
    FROM information_schema.sequences
    WHERE sequence_schema = 'public';
    """
188
+
189
def reset_database(confirm=True):
    """Reset the database by dropping all tables and recreating them.

    DESTRUCTIVE: deletes all data. When ``confirm`` is True (the default)
    the user must type 'yes' at an interactive prompt before anything runs.
    All work happens in one transaction that is rolled back on any error.
    """
    if confirm:
        user_confirm = input("WARNING: This will DELETE ALL DATA. Type 'yes' to proceed: ")
        if user_confirm.lower() != 'yes':
            print("Database reset cancelled.")
            return

    conn = None
    try:
        conn = get_db_connection()
        conn.autocommit = False  # explicit transaction; committed only at the end
        print("Database connection established.")

        with conn.cursor() as cur:
            print("Dropping and recreating schema...")
            # SCHEMA_SQL begins with DROP ... IF EXISTS statements, so this
            # single execute both tears down and rebuilds the schema.
            cur.execute(SCHEMA_SQL)
            print("Schema recreated successfully.")

            # reset_sequences() returns a SELECT that generates one
            # 'ALTER SEQUENCE ... RESTART WITH 1;' string per sequence;
            # each generated command is then executed individually.
            print("Resetting sequences...")
            reset_sql_query = reset_sequences()
            cur.execute(reset_sql_query)
            reset_commands = cur.fetchall()
            for command in reset_commands:
                cur.execute(command[0])
            print("Sequences reset successfully.")

        conn.commit()
        print("Database reset complete.")

    except psycopg2.Error as e:
        # Database-level failure: report and undo the partial transaction.
        print(f"Database error during reset: {e}")
        if conn:
            conn.rollback()
            print("Transaction rolled back.")
    except Exception as e:
        # Non-database failure (e.g. bad fetch result): still roll back.
        print(f"An unexpected error occurred during reset: {e}")
        if conn:
            conn.rollback()
    finally:
        if conn:
            conn.close()
            print("Database connection closed.")
234
+
235
def setup_database(confirm=True):
    """Create the database schema, but only if it does not exist yet.

    Non-destructive counterpart to reset_database(): if the 'users' table is
    already present the function leaves the database untouched. When
    ``confirm`` is True (the default) the user must type 'yes' at an
    interactive prompt first.
    """
    if confirm:
        user_confirm = input("Do you want to set up the database? Type 'yes' to proceed: ")
        if user_confirm.lower() != 'yes':
            print("Database setup cancelled.")
            return

    conn = None
    try:
        conn = get_db_connection()
        conn.autocommit = False  # explicit transaction; committed only at the end
        print("Database connection established.")

        with conn.cursor() as cur:
            # 'users' is used as the sentinel table: if it exists we assume
            # the whole schema was created previously.
            print("Checking if tables exist...")
            cur.execute("""
                SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_schema = 'public'
                    AND table_name = 'users'
                );
            """)
            tables_exist = cur.fetchone()[0]

            if tables_exist:
                print("Tables already exist. Use reset_database() to reset the database or run setup with confirm=False.")
                conn.rollback()  # rollback: no changes should be made
                return

            print("Creating schema...")
            cur.execute(SCHEMA_SQL)
            print("Schema created successfully.")

        conn.commit()
        print("Database setup complete.")

    except psycopg2.Error as e:
        # Database-level failure: report and undo the partial transaction.
        print(f"Database error during setup: {e}")
        if conn:
            conn.rollback()
            print("Transaction rolled back.")
    except Exception as e:
        # Non-database failure: still roll back before closing.
        print(f"An unexpected error occurred during setup: {e}")
        if conn:
            conn.rollback()
    finally:
        if conn:
            conn.close()
            print("Database connection closed.")
285
+
286
if __name__ == "__main__":
    # Interactive entry point: dispatch on the requested action.
    action = input("Enter 'setup' to setup database or 'reset' to reset database: ").lower()
    handlers = {"setup": setup_database, "reset": reset_database}
    handler = handlers.get(action)
    if handler is None:
        print("Invalid action. Use 'setup' or 'reset'.")
    else:
        handler()
backend/main.py CHANGED
@@ -1,20 +1,84 @@
1
- import asyncio # <-- Import asyncio
2
- from backend.utils import get_completion
3
- from backend.config import CURRICULUM_INSTRUCTIONS
4
-
5
- # Define an async function to hold the await call
6
- async def main():
7
- query = "need to improve my chinese so as to propose to my girlfriend"
8
- print("Getting completion...") # Optional: Indicate progress
 
 
 
 
 
 
9
  try:
10
- response = await get_completion(prompt=query, instruction=CURRICULUM_INSTRUCTIONS)
11
- print("\nResponse:")
12
- print(response)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  except Exception as e:
14
- print(f"An error occurred: {e}") # Basic error handling
 
15
 
16
- # Use asyncio.run() to execute the async main function
17
- # The if __name__ == "__main__": block ensures this runs only when
18
- # the script is executed directly (e.g., python -m backend.main)
19
- if __name__ == "__main__":
20
- asyncio.run(main())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from backend.utils import generate_completions
from backend import config
from backend.database import get_db_connection
import psycopg2
from psycopg2.extras import RealDictCursor

app = FastAPI()

# Dependency that yields a database connection and closes it afterwards.
# NOTE(review): no endpoint currently declares Depends(get_db) — verify this
# is wired up before relying on per-request connections.
async def get_db():
    # BUG FIX: get_db_connection() is the synchronous psycopg2 helper from
    # backend.database and returns a plain connection, not an awaitable;
    # `await get_db_connection()` raised TypeError as soon as the
    # dependency ran.
    conn = get_db_connection()
    try:
        yield conn
    finally:
        conn.close()

# Request body shared by all three generation endpoints.
class GenerationRequest(BaseModel):
    # ID of the requesting user (accepted but not persisted by the endpoints).
    user_id: int
    # Free-text description of the learning scenario/topic.
    query: str
23
+
24
@app.get("/")
async def root():
    """Landing/health-check endpoint."""
    return {"message": "Welcome to the AI Learning Assistant API!"}
27
+
28
@app.post("/generate/flashcards")
async def generate_flashcards(data: GenerationRequest):
    """Generate context-specific flashcards for data.query via the LLM."""
    try:
        generated = await generate_completions.get_completions(
            data.query,
            config.flashcard_mode_instructions,
        )
        payload = {
            "data": generated,
            "type": "flashcards",
            "status": "success",
        }
        return JSONResponse(content=payload, status_code=200)
    except Exception as exc:
        # Surface any generation failure as a 500 with the error text.
        raise HTTPException(status_code=500, detail=str(exc))
45
+
46
 
47
@app.post("/generate/exercises")
async def generate_exercises(data: GenerationRequest):
    """Generate cloze exercises for data.query via the LLM."""
    try:
        generated = await generate_completions.get_completions(
            data.query,
            config.exercise_mode_instructions,
        )
        payload = {
            "data": generated,
            "type": "exercises",
            "status": "success",
        }
        return JSONResponse(content=payload, status_code=200)
    except Exception as exc:
        # Surface any generation failure as a 500 with the error text.
        raise HTTPException(status_code=500, detail=str(exc))
65
+
66
+
67
@app.post("/generate/simulation")
async def generate_simulation(data: GenerationRequest):
    """Generate a roleplay dialogue simulation for data.query via the LLM."""
    try:
        generated = await generate_completions.get_completions(
            data.query,
            config.simulation_mode_instructions,
        )
        payload = {
            "data": generated,
            "type": "simulation",
            "status": "success",
        }
        return JSONResponse(content=payload, status_code=200)
    except Exception as exc:
        # Surface any generation failure as a 500 with the error text.
        raise HTTPException(status_code=500, detail=str(exc))
backend/utils.py DELETED
@@ -1,22 +0,0 @@
1
- import os
2
- import asyncio
3
- from openai import AsyncOpenAI
4
- from dotenv import load_dotenv
5
-
6
- load_dotenv()
7
-
8
- client = AsyncOpenAI(
9
- api_key=os.getenv("GEMINI_API_KEY"),
10
- base_url=os.getenv("GEMINI_BASE_URL"),
11
- )
12
-
13
- async def get_completion(prompt: str, instruction: str) -> str:
14
- response = await client.chat.completions.create(
15
- model=os.getenv("MODEL"),
16
- messages=[
17
- {"role": "system", "content": instruction},
18
- {"role": "user", "content": prompt}
19
- ],
20
- response_format={"type": "json_object"},
21
- )
22
- return response.choices[0].message.content
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
backend/utils/__pycache__/generate_completions.cpython-310.pyc ADDED
Binary file (1.76 kB). View file
 
backend/utils/__pycache__/generate_completions.cpython-312.pyc ADDED
Binary file (2.61 kB). View file
 
backend/utils/generate_completions.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from openai import AsyncOpenAI, OpenAI
import asyncio
import json
from typing import AsyncIterator
from typing import Union, List, Dict
from dotenv import load_dotenv
import os
# Load BASE_URL / API_KEY / MODEL from a local .env file, if present.
load_dotenv()

# Module-level async client used by get_completions(). Configured entirely
# from the environment so the same code can target any OpenAI-compatible
# endpoint (the compose file passes API_KEY; BASE_URL selects the provider).
client = AsyncOpenAI(
    base_url=os.getenv("BASE_URL"),
    api_key=os.getenv("API_KEY"),
)
15
+
16
def process_input(data: Union[str, List[Dict[str, str]]]) -> Union[str, List[Dict[str, str]]]:
    """Normalize prompt input before it is sent to the model.

    FIX: the previous docstring claimed this "uppercases" strings — it does
    not; it only strips whitespace. Behavior is unchanged, the docs now
    match the code.

    - str: returns the string with surrounding whitespace stripped.
    - list of message dicts: returns a new list containing only the dicts
      that have a 'content' key; each kept dict is copied with its
      'content' value stripped, all other keys preserved. Items that are
      not dicts or lack 'content' are silently dropped.

    Raises:
        TypeError: if ``data`` is neither a str nor a list.
    """
    if isinstance(data, str):
        return data.strip()

    elif isinstance(data, list):
        return [
            {**item, "content": item["content"].strip()}
            for item in data
            if isinstance(item, dict) and "content" in item
        ]

    else:
        raise TypeError("Input must be a string or a list of dictionaries with a 'content' field")
33
+
34
+
35
async def get_completions(
    prompt: Union[str, List[Dict[str, str]]],
    instructions: str
) -> str:
    """Send the prompt plus system instructions to the chat model.

    The prompt is normalized via process_input(); a string becomes a single
    user message, a message list is appended after the system message.
    Returns the raw content string of the first completion choice
    (constrained to JSON via response_format).
    """
    cleaned = process_input(prompt)

    if isinstance(cleaned, str):
        messages = [
            {"role": "system", "content": instructions},
            {"role": "user", "content": cleaned},
        ]
    elif isinstance(cleaned, list):
        messages = [{"role": "system", "content": instructions}, *cleaned]
    else:
        raise TypeError("Unexpected processed input type.")

    response = await client.chat.completions.create(
        model=os.getenv("MODEL"),
        messages=messages,
        response_format={"type": "json_object"},
    )

    output: str = response.choices[0].message.content
    return output
docker-compose.yml CHANGED
@@ -6,7 +6,7 @@ services:
6
  volumes:
7
  - ./output:/code/output
8
  environment:
9
- - GEMINI_API_KEY=${GEMINI_API_KEY}
10
  - OLLAMA_API_KEY=${OLLAMA_API_KEY}
11
  - API_PORT=${API_PORT}
12
  env_file:
@@ -16,4 +16,4 @@ services:
16
  watch:
17
  - action: sync
18
  path: .
19
- target: /code
 
6
  volumes:
7
  - ./output:/code/output
8
  environment:
9
+ - API_KEY=${API_KEY}
10
  - OLLAMA_API_KEY=${OLLAMA_API_KEY}
11
  - API_PORT=${API_PORT}
12
  env_file:
 
16
  watch:
17
  - action: sync
18
  path: .
19
+ target: /code