ars
- Ars/__init__.py +0 -0
- Ars/ai_functions.py +0 -0
- Ars/controllers.py +53 -0
- Ars/core.py +24 -0
- Ars/imports.py +0 -0
- Ars/objects.py +120 -0
- Ars/repositories.py +407 -0
- Ars/routes.py +0 -0
- README.md +1 -0
- gamification/pointLogic.py +4 -3
- requirements.txt +1 -0
Ars/__init__.py
ADDED
File without changes
Ars/ai_functions.py
ADDED
File without changes
Ars/controllers.py
ADDED
@@ -0,0 +1,53 @@
# TODO: perform the risk calculation and generate the Breakdown by Domain, Flagged Risk Areas, and Boost Suggestions from the resume

# NOTE: the risk calculation uses the formula ResilienceScore = (1 - AutomationRisk) * 0.5 + SkillDepth * 0.3 + AICollabReadiness * 0.2
# NOTE: - **Automation Risk** is inverted (lower risk = higher score)
# NOTE: - Normalize each sub-score to a scale of 0–100
# NOTE: - Final score is a 0–100 **Crayonics Resilience Score**

# TODO: still needed: 1. an AI client, 2. a cache controller, 3. database functions
import json

from Ars.core import r


def analyze_resume(resume_text: str) -> dict:
    # Step 1: Check cache (values are stored as JSON strings, see Step 6)
    cached = r.get(resume_text)
    if cached:
        return json.loads(cached)

    # Step 2: Extract skills, domains, and experience info
    # (extract_resume_insights and the scoring helpers below are expected from the
    # AI layer that is still a TODO; they are not defined in this commit)
    extracted_data = extract_resume_insights(resume_text)

    # Step 3: Score sub-components
    automation_risk = calculate_automation_risk(extracted_data)
    skill_depth = calculate_skill_depth(extracted_data)
    ai_collab_readiness = calculate_ai_collab_readiness(extracted_data)

    # Normalize sub-scores to 0–100
    automation_risk = normalize_score(1 - automation_risk)
    skill_depth = normalize_score(skill_depth)
    ai_collab_readiness = normalize_score(ai_collab_readiness)

    # Step 4: Final score
    resilience_score = (automation_risk * 0.5 +
                        skill_depth * 0.3 +
                        ai_collab_readiness * 0.2)

    # Step 5: Breakdown, flags, suggestions
    breakdown = generate_domain_breakdown(extracted_data)
    risk_flags = identify_risk_flags(extracted_data)
    suggestions = generate_boost_suggestions(risk_flags, extracted_data)

    # Step 6: Package and cache
    result = {
        "resilience_score": round(resilience_score, 2),
        "breakdown_by_domain": breakdown,
        "flagged_risk_areas": risk_flags,
        "boost_suggestions": suggestions,
    }
    # Redis cannot store a dict directly; cache the JSON-serialized result for an hour
    r.setex(name=resume_text, time=3600, value=json.dumps(result))
    db_controller.save_analysis_result(resume_text, result)

    return result
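The normalize_score helper referenced above is not defined in this commit; a minimal sketch, assuming each raw sub-score arrives in the 0–1 range (the clamping is an assumption), plus a worked example of the NOTE formula:

def normalize_score(raw: float) -> float:
    # Hypothetical helper: clamp a 0-1 sub-score and map it onto the 0-100 scale
    return max(0.0, min(1.0, raw)) * 100

# Worked example with AutomationRisk=0.4, SkillDepth=0.7, AICollabReadiness=0.5:
# (1 - 0.4)*100*0.5 + 0.7*100*0.3 + 0.5*100*0.2 = 30 + 21 + 10 = 61.0 Resilience Score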
Ars/core.py
ADDED
@@ -0,0 +1,24 @@
# core.py: shared MongoDB (Motor) client and Redis cache used across the Ars package
import os
from dotenv import load_dotenv
import redis
from motor.motor_asyncio import AsyncIOMotorClient

load_dotenv()

MONGO_URI = os.getenv('MONGO_URI')
DB_NAME = "crayonics"
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', None)
FERNET_SECRET_KEY = os.getenv('FERNET_SECRET_KEY')
REDIS_USERNAME = os.getenv('REDIS_USERNAME')

client = AsyncIOMotorClient(MONGO_URI)
db = client[DB_NAME]

r = redis.StrictRedis(
    host=REDIS_HOST,
    port=REDIS_PORT,
    password=REDIS_PASSWORD,
    username=REDIS_USERNAME,
    db=0,
    decode_responses=True
)
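For reference, a .env file supplying the variables read above might look like the following; every value is a placeholder, not a real credential:

MONGO_URI=mongodb+srv://<user>:<password>@<cluster-host>/
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_USERNAME=default
REDIS_PASSWORD=<redis-password>
# FERNET_SECRET_KEY should be a key generated with cryptography.fernet.Fernet.generate_key()
FERNET_SECRET_KEY=<fernet-key>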
Ars/imports.py
ADDED
File without changes
Ars/objects.py
ADDED
@@ -0,0 +1,120 @@
from datetime import datetime
from pydantic import model_validator, BaseModel
from typing import List, Optional, Union
from bson import ObjectId


class UserResilienceScoreCreate(BaseModel):
    overallScore: float
    userId: str
    BreakDownByDomainId: str
    FlaggedRiskAreasId: str
    BoostSuggestionsId: str


class UserResilienceScoreUpdate(BaseModel):
    overallScore: Optional[float] = None
    BreakDownByDomainId: Optional[str] = None
    FlaggedRiskAreasId: Optional[str] = None
    BoostSuggestionsId: Optional[str] = None


class BreakDownByDomainCreate(BaseModel):
    userId: str
    Technical: float
    Creative: float
    Strategy: float
    Collaboration: float


class BreakDownByDomainUpdate(BaseModel):
    Technical: Optional[float] = None
    Creative: Optional[float] = None
    Strategy: Optional[float] = None
    Collaboration: Optional[float] = None


class FlaggedRiskAreasCreate(BaseModel):
    userId: str
    risk_areas: List[str]


class FlaggedRiskAreasUpdate(BaseModel):
    risk_areas: List[str]


class BoostSuggestionsCreate(BaseModel):
    boost_suggestions: List[str]


class BoostSuggestionsUpdate(BaseModel):
    boost_suggestions: List[str]


class UserResilienceScoreOut(UserResilienceScoreCreate):
    _id: Optional[ObjectId] = None  # Make sure _id can be Optional
    id: Optional[str] = None

    # To convert MongoDB ObjectId to string
    class Config:
        json_encoders = {
            ObjectId: str
        }

    # Custom validator to handle the ObjectId conversion if needed
    @model_validator(mode='before')
    def handle_objectid(cls, values):
        if '_id' in values and isinstance(values['_id'], ObjectId):
            values['id'] = str(values['_id'])  # Convert ObjectId to string
        return values


class BreakDownByDomainOut(BreakDownByDomainCreate):
    _id: Optional[ObjectId] = None  # Make sure _id can be Optional
    id: Optional[str] = None

    # To convert MongoDB ObjectId to string
    class Config:
        json_encoders = {
            ObjectId: str
        }

    # Custom validator to handle the ObjectId conversion if needed
    @model_validator(mode='before')
    def handle_objectid(cls, values):
        if '_id' in values and isinstance(values['_id'], ObjectId):
            values['id'] = str(values['_id'])  # Convert ObjectId to string
        return values


class FlaggedRiskAreasOut(FlaggedRiskAreasCreate):
    _id: Optional[ObjectId] = None  # Make sure _id can be Optional
    id: Optional[str] = None

    class Config:
        json_encoders = {
            ObjectId: str
        }

    # Custom validator to handle the ObjectId conversion if needed
    @model_validator(mode='before')
    def handle_objectid(cls, values):
        if '_id' in values and isinstance(values['_id'], ObjectId):
            values['id'] = str(values['_id'])  # Convert ObjectId to string
        return values


class BoostSuggestionsOut(BoostSuggestionsCreate):
    _id: Optional[ObjectId] = None  # Make sure _id can be Optional
    id: Optional[str] = None

    class Config:
        json_encoders = {
            ObjectId: str
        }

    # Custom validator to handle the ObjectId conversion if needed
    @model_validator(mode='before')
    def handle_objectid(cls, values):
        if '_id' in values and isinstance(values['_id'], ObjectId):
            values['id'] = str(values['_id'])  # Convert ObjectId to string
        return values
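The four *Out models above repeat the same _id/id handling; one possible refactor (a sketch, not part of this commit) is a shared mixin the Out models could inherit from:

from typing import Optional
from bson import ObjectId
from pydantic import BaseModel, model_validator


class MongoOutMixin(BaseModel):
    id: Optional[str] = None  # string form of MongoDB's _id

    class Config:
        json_encoders = {ObjectId: str}

    @model_validator(mode='before')
    @classmethod
    def handle_objectid(cls, values):
        # Convert the raw Mongo _id into a string id before validation
        if isinstance(values, dict) and isinstance(values.get('_id'), ObjectId):
            values['id'] = str(values['_id'])
        return values


# e.g. class UserResilienceScoreOut(UserResilienceScoreCreate, MongoOutMixin): pass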
Ars/repositories.py
ADDED
@@ -0,0 +1,407 @@
from Ars.core import db
from bson import ObjectId
from fastapi import HTTPException
from typing import Optional, List
from motor.motor_asyncio import AsyncIOMotorDatabase
from Ars.objects import UserResilienceScoreCreate, UserResilienceScoreOut, BreakDownByDomainCreate, BreakDownByDomainOut, FlaggedRiskAreasCreate, FlaggedRiskAreasOut, BoostSuggestionsCreate, BoostSuggestionsOut, BoostSuggestionsUpdate, UserResilienceScoreUpdate, FlaggedRiskAreasUpdate, BreakDownByDomainUpdate


async def create_user_resilience(data: UserResilienceScoreCreate) -> UserResilienceScoreOut:
    """
    Create a new UserResilienceScore in the database.

    Args:
        data: A Pydantic object containing the fields to create.

    Returns:
        The newly created object.
    """
    result = await db.user_resilience.insert_one(data.model_dump())
    created = await db.user_resilience.find_one({"_id": result.inserted_id})
    out = UserResilienceScoreOut(**created)
    return out


async def get_user_resilience(object_id: str) -> Optional[UserResilienceScoreOut]:
    """
    Retrieve a UserResilienceScore by its ID.

    Args:
        object_id: The ID of the object to retrieve.

    Returns:
        The found object, or raises 404 if not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.user_resilience.find_one({"_id": ObjectId(object_id)})

    if result is None:
        raise HTTPException(status_code=404, detail="Object not found")
    out = UserResilienceScoreOut(**result)
    return out


async def update_user_resilience(object_id: str, data: UserResilienceScoreUpdate) -> UserResilienceScoreOut:
    """
    Update a UserResilienceScore by its ID.

    Args:
        object_id: The ID of the object to update.
        data: A Pydantic object with the updated fields.

    Returns:
        The updated object.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    update_data = {k: v for k, v in data.model_dump().items() if v is not None}
    result = await db.user_resilience.update_one(
        {"_id": ObjectId(object_id)},
        {"$set": update_data}
    )

    if result.matched_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    updateresult = await db.user_resilience.find_one({"_id": ObjectId(object_id)})
    out = UserResilienceScoreOut(**updateresult)
    return out


async def delete_user_resilience(object_id: str) -> dict:
    """
    Delete a UserResilienceScore by its ID.

    Args:
        object_id: The ID of the object to delete.

    Returns:
        A confirmation message, or raises 404 if the object is not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.user_resilience.delete_one({"_id": ObjectId(object_id)})

    if result.deleted_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    return {"message": "Object deleted successfully"}


async def create_breakdown_by_domain(data: BreakDownByDomainCreate) -> BreakDownByDomainOut:
    """
    Create a new BreakDownByDomain in the database.

    Args:
        data: A Pydantic object containing the fields to create.

    Returns:
        The newly created object.
    """
    result = await db.breakdown_by_domain.insert_one(data.model_dump())
    created = await db.breakdown_by_domain.find_one({"_id": result.inserted_id})
    out = BreakDownByDomainOut(**created)
    return out


async def get_breakdown_by_domain(object_id: str) -> Optional[BreakDownByDomainOut]:
    """
    Retrieve a BreakDownByDomain by its ID.

    Args:
        object_id: The ID of the object to retrieve.

    Returns:
        The found object, or raises 404 if not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.breakdown_by_domain.find_one({"_id": ObjectId(object_id)})

    if result is None:
        raise HTTPException(status_code=404, detail="Object not found")
    out = BreakDownByDomainOut(**result)
    return out


async def update_breakdown_by_domain(object_id: str, data: BreakDownByDomainUpdate) -> BreakDownByDomainOut:
    """
    Update a BreakDownByDomain by its ID.

    Args:
        object_id: The ID of the object to update.
        data: A Pydantic object with the updated fields.

    Returns:
        The updated object.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    update_data = {k: v for k, v in data.model_dump().items() if v is not None}
    result = await db.breakdown_by_domain.update_one(
        {"_id": ObjectId(object_id)},
        {"$set": update_data}
    )

    if result.matched_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    updateresult = await db.breakdown_by_domain.find_one({"_id": ObjectId(object_id)})
    out = BreakDownByDomainOut(**updateresult)
    return out


async def delete_breakdown_by_domain(object_id: str) -> dict:
    """
    Delete a BreakDownByDomain by its ID.

    Args:
        object_id: The ID of the object to delete.

    Returns:
        A confirmation message, or raises 404 if the object is not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.breakdown_by_domain.delete_one({"_id": ObjectId(object_id)})

    if result.deleted_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    return {"message": "Object deleted successfully"}


async def create_flagged_risk_areas(data: FlaggedRiskAreasCreate) -> FlaggedRiskAreasOut:
    """
    Create a new FlaggedRiskAreas in the database.

    Args:
        data: A Pydantic object containing the fields to create.

    Returns:
        The newly created object.
    """
    result = await db.flagged_risk_areas.insert_one(data.model_dump())
    created = await db.flagged_risk_areas.find_one({"_id": result.inserted_id})
    out = FlaggedRiskAreasOut(**created)
    return out


async def get_flagged_risk_areas(object_id: str) -> Optional[FlaggedRiskAreasOut]:
    """
    Retrieve a FlaggedRiskAreas by its ID.

    Args:
        object_id: The ID of the object to retrieve.

    Returns:
        The found object, or raises 404 if not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.flagged_risk_areas.find_one({"_id": ObjectId(object_id)})

    if result is None:
        raise HTTPException(status_code=404, detail="Object not found")
    out = FlaggedRiskAreasOut(**result)
    return out


async def update_flagged_risk_areas(object_id: str, data: FlaggedRiskAreasUpdate) -> FlaggedRiskAreasOut:
    """
    Update a FlaggedRiskAreas by its ID.

    Args:
        object_id: The ID of the object to update.
        data: A Pydantic object with the updated fields.

    Returns:
        The updated object.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    update_data = {k: v for k, v in data.model_dump().items() if v is not None}
    result = await db.flagged_risk_areas.update_one(
        {"_id": ObjectId(object_id)},
        {"$set": update_data}
    )

    if result.matched_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    updateresult = await db.flagged_risk_areas.find_one({"_id": ObjectId(object_id)})
    out = FlaggedRiskAreasOut(**updateresult)
    return out


async def delete_flagged_risk_areas(object_id: str) -> dict:
    """
    Delete a FlaggedRiskAreas by its ID.

    Args:
        object_id: The ID of the object to delete.

    Returns:
        A confirmation message, or raises 404 if the object is not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.flagged_risk_areas.delete_one({"_id": ObjectId(object_id)})

    if result.deleted_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    return {"message": "Object deleted successfully"}


async def create_boost_suggestions(data: BoostSuggestionsCreate) -> BoostSuggestionsOut:
    """
    Create a new BoostSuggestions in the database.

    Args:
        data: A Pydantic object containing the fields to create.

    Returns:
        The newly created object.
    """
    result = await db.boost_suggestions.insert_one(data.model_dump())
    created = await db.boost_suggestions.find_one({"_id": result.inserted_id})
    out = BoostSuggestionsOut(**created)
    return out


async def get_boost_suggestions(object_id: str) -> Optional[BoostSuggestionsOut]:
    """
    Retrieve a BoostSuggestions by its ID.

    Args:
        object_id: The ID of the object to retrieve.

    Returns:
        The found object, or raises 404 if not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.boost_suggestions.find_one({"_id": ObjectId(object_id)})

    if result is None:
        raise HTTPException(status_code=404, detail="Object not found")
    out = BoostSuggestionsOut(**result)
    return out


async def update_boost_suggestions(object_id: str, data: BoostSuggestionsUpdate) -> BoostSuggestionsOut:
    """
    Update a BoostSuggestions by its ID.

    Args:
        object_id: The ID of the object to update.
        data: A Pydantic object with the updated fields.

    Returns:
        The updated object.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    update_data = {k: v for k, v in data.model_dump().items() if v is not None}
    result = await db.boost_suggestions.update_one(
        {"_id": ObjectId(object_id)},
        {"$set": update_data}
    )

    if result.matched_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    updateresult = await db.boost_suggestions.find_one({"_id": ObjectId(object_id)})
    out = BoostSuggestionsOut(**updateresult)
    return out


async def delete_boost_suggestions(object_id: str) -> dict:
    """
    Delete a BoostSuggestions by its ID.

    Args:
        object_id: The ID of the object to delete.

    Returns:
        A confirmation message, or raises 404 if the object is not found.
    """
    if not ObjectId.is_valid(object_id):
        raise HTTPException(status_code=400, detail="Invalid ID format")

    result = await db.boost_suggestions.delete_one({"_id": ObjectId(object_id)})

    if result.deleted_count == 0:
        raise HTTPException(status_code=404, detail="Object not found")

    return {"message": "Object deleted successfully"}
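A quick usage sketch for these helpers (not part of the commit); it assumes a reachable MongoDB behind MONGO_URI and that the *Out validator in Ars/objects.py populates .id; the IDs and scores below are made up, and in the app itself these calls would be awaited from FastAPI routes rather than asyncio.run:

import asyncio

from Ars.objects import UserResilienceScoreCreate, UserResilienceScoreUpdate
from Ars.repositories import (create_user_resilience, update_user_resilience,
                              delete_user_resilience)


async def demo():
    # Create, update, and delete one UserResilienceScore document
    created = await create_user_resilience(UserResilienceScoreCreate(
        overallScore=61.0,
        userId="demo-user",
        BreakDownByDomainId="demo-breakdown-id",
        FlaggedRiskAreasId="demo-flags-id",
        BoostSuggestionsId="demo-boosts-id",
    ))
    updated = await update_user_resilience(created.id, UserResilienceScoreUpdate(overallScore=70.0))
    print(updated.overallScore)
    await delete_user_resilience(created.id)


asyncio.run(demo())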
Ars/routes.py
ADDED
File without changes
README.md
CHANGED
@@ -8,3 +8,4 @@ pinned: false
 license: apache-2.0
 ---
 
+If you change the connection string in the env to a new database, make sure the levels table is populated with levels up to level 9 for the default career path and default level name; otherwise points won't be calculated properly.
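A seeding sketch for that note (not part of this commit), assuming a levels collection in the crayonics database and the field names used by SimpleIndividualUserLevel in gamification/pointLogic.py; the careerPath key and the point ranges are assumptions:

import os
from pymongo import MongoClient

client = MongoClient(os.getenv("MONGO_URI"))
levels = client["crayonics"]["levels"]  # assumed collection name
levels.insert_many([
    {
        "careerPath": "default",       # assumed key for the default career path
        "levelName": f"Level {n}",     # default level name
        "levelNumber": n,
        "minPoints": (n - 1) * 100,    # assumed point ranges
        "maxPoints": n * 100 - 1,
    }
    for n in range(1, 10)              # levels 1 through 9, as the README requires
])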
gamification/pointLogic.py
CHANGED
@@ -93,14 +93,15 @@ def get_all_simple_points_func(userId) -> SimpleIndividualUserLevel:
     db = client[db_name]
     collection = db[collection_name]
     dreamJob = get_dream_job(userId=userId)
-
+    print(dreamJob)
     point_cursor = collection.find({"userId": userId})
     try:
         points_list = list(point_cursor)
-
+
         totalPoints = sum([point['numOfPoints'] for point in points_list])
+
         particularLevelInfo = get_particular_level(dreamJob=dreamJob,totalPoints=totalPoints)
-
+        print(particularLevelInfo)
         points = SimpleIndividualUserLevel(totalpoints=totalPoints,levelName=particularLevelInfo[0].levelName,maxPoints=particularLevelInfo[0].maxPoints,minPoints=particularLevelInfo[0].minPoints,levelNumber=particularLevelInfo[0].levelNumber)
     except:
         totalPoints = 0
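As an aside (not part of this change), the per-user total could also be computed server-side with a MongoDB aggregation over the numOfPoints field; a sketch using the collection already opened above:

pipeline = [
    {"$match": {"userId": userId}},
    {"$group": {"_id": "$userId", "totalPoints": {"$sum": "$numOfPoints"}}},
]
agg = list(collection.aggregate(pipeline))
totalPoints = agg[0]["totalPoints"] if agg else 0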
requirements.txt
CHANGED
@@ -2,6 +2,7 @@ fastapi[all]
 requests
 python-dotenv
 pymupdf
+motor
 pinecone
 sentence-transformers
 einops