added auth and get routes (#14)
#update (7ea3d3f1c8cdc94568081cb243236ff6ca0de69c)
- Ars/ai_functions.py +2 -2
- Ars/controllers.py +30 -14
- Ars/repositories.py +48 -2
- Ars/routes.py +19 -3
- security/__init__.py +0 -0
- security/authDependency.py +14 -0
Ars/ai_functions.py
CHANGED
@@ -3,15 +3,15 @@ from io import BytesIO
 from Ars.objects import ai,ResumeData,AutomationRiskResult,AutomationRiskInput,RealWorldQuestion,SkillDepthResult,SkillDepthInput,BreakDownByDomainUpdate,FlaggedRiskAreasUpdate,BoostSuggestionsUpdate,AICollabReadinessInput
 from Ars.embedDoc import search_pinecone_text
 from fastapi import UploadFile
-
 def extract_text_from_bytes(pdf_bytes: bytes) -> str:
     output_string = BytesIO()
     with BytesIO(pdf_bytes) as input_stream:
         extract_text_to_fp(input_stream, output_string)
     return output_string.getvalue().decode()
 
+
 
-async def resume_analysis(upload_file:UploadFile) -> ResumeData:
+async def resume_analysis(upload_file:UploadFile) -> dict[str, str | ResumeData]:
     contents = await upload_file.read()
     resume = extract_text_from_bytes(pdf_bytes=contents)
     if resume:
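Note: the hunk header shows the module already imports BytesIO from io, and extract_text_to_fp is pdfminer.six's streaming text-extraction API (pdfminer.high_level), though that import sits outside the hunk. The new return annotation dict[str, str | ResumeData] also requires Python 3.10+ for the | union syntax. A standalone sketch for exercising the helper outside FastAPI, assuming pdfminer.six is installed:

# Standalone extraction sketch; assumes pdfminer.six provides
# extract_text_to_fp (pdfminer.high_level), as the repo's usage implies.
from io import BytesIO
from pdfminer.high_level import extract_text_to_fp

def extract_text_from_bytes(pdf_bytes: bytes) -> str:
    output_string = BytesIO()
    with BytesIO(pdf_bytes) as input_stream:
        # Stream the PDF's text layer into the in-memory buffer.
        extract_text_to_fp(input_stream, output_string)
    return output_string.getvalue().decode()

if __name__ == "__main__":
    with open("sample.pdf", "rb") as f:  # hypothetical test file
        print(extract_text_from_bytes(f.read())[:200])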
Ars/controllers.py
CHANGED
@@ -1,19 +1,35 @@
 from Ars.core import r
-from Ars.repositories import create_boost_suggestions,create_breakdown_by_domain,create_flagged_risk_areas,create_user_resilience
+from Ars.repositories import get_document_by_hashed_doc,create_new_hashed_doc_entry,create_boost_suggestions,create_breakdown_by_domain,create_flagged_risk_areas,create_user_resilience
 from fastapi import UploadFile
 from Ars.ai_functions import resume_analysis,calculate_automation_risk,calculate_Ai_collab_readiness,calculate_skill_depth,generate_boost_suggestion,generate_domain_breakdown,generate_flagged_risk_areas
 from Ars.objects import AICollabReadiness,SkillDepthResult,AutomationRiskResult
-
-resume= await resume_analysis(file)
-risk = calculate_automation_risk(resume)
-risk = AutomationRiskResult(**risk.model_dump())
-skill_depth = calculate_skill_depth(resume)
-skill_depth= SkillDepthResult(**skill_depth.model_dump())
-ai_readiness = calculate_Ai_collab_readiness(resume)
-ai_readiness = AICollabReadiness(**ai_readiness.model_dump())
-ResilienceScore = ((1-(risk.result/100))*0.5+(skill_depth.result/100)*0.3+(ai_readiness.result/100)*0.2)
-flagged_risk =generate_flagged_risk_areas(resume=resume,skil_depth=skill_depth,risk=risk,ai_readiness=ai_readiness)
-boost_suggestion = generate_boost_suggestion(resume=resume,skil_depth=skill_depth,risk=risk,ai_readiness=ai_readiness)
-domain_breakdown = generate_domain_breakdown(resume=resume,skil_depth=skill_depth,risk=risk,ai_readiness=ai_readiness)
+import hashlib
 
-
+def get_document_hash(document_content: str) -> str:
+    # Ensure consistent encoding (e.g., UTF-8) before hashing
+    document_bytes = document_content.encode('utf-8')
+    hasher = hashlib.sha256()
+    hasher.update(document_bytes)
+    return hasher.hexdigest()
+
+async def resilience_analysis(file:UploadFile,userId:str):
+    hashed_doc = get_document_hash(resume)
+
+    check = get_document_by_hashed_doc(hashed_doc=hashed_doc)
+    if check==None:
+        resume= await resume_analysis(file)
+        risk = calculate_automation_risk(resume)
+        risk = AutomationRiskResult(**risk.model_dump())
+        skill_depth = calculate_skill_depth(resume)
+        skill_depth= SkillDepthResult(**skill_depth.model_dump())
+        ai_readiness = calculate_Ai_collab_readiness(resume)
+        ai_readiness = AICollabReadiness(**ai_readiness.model_dump())
+        ResilienceScore = ((1-(risk.result/100))*0.5+(skill_depth.result/100)*0.3+(ai_readiness.result/100)*0.2)
+        flagged_risk =generate_flagged_risk_areas(resume=resume,skil_depth=skill_depth,risk=risk,ai_readiness=ai_readiness)
+        boost_suggestion = generate_boost_suggestion(resume=resume,skil_depth=skill_depth,risk=risk,ai_readiness=ai_readiness)
+        domain_breakdown = generate_domain_breakdown(resume=resume,skil_depth=skill_depth,risk=risk,ai_readiness=ai_readiness)
+        final_analysis_response ={"overall score": ResilienceScore,"flagged Risk": flagged_risk,"boost suggestion":boost_suggestion,"domain breakdown":domain_breakdown,"resume":resume,"skil_depth":skill_depth,"risk":risk,"ai_readiness":ai_readiness}
+        await create_new_hashed_doc_entry(data={"hashed_doc":hashed_doc,"resume":final_analysis_response,"userId":userId})
+        return final_analysis_response
+    else:
+        return check
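Note: the new flow hashes the resume text and uses the hash as a cache key, so an identical upload skips the LLM pipeline and returns the stored result. The composite score weights inverted risk at 0.5, skill depth at 0.3, and AI-collaboration readiness at 0.2; with risk = 40, skill depth = 70, and readiness = 50, the score is (1 - 0.40)*0.5 + 0.70*0.3 + 0.50*0.2 = 0.61. Two issues are visible as committed: hashed_doc = get_document_hash(resume) runs before resume is assigned, and get_document_by_hashed_doc is async but called without await, so check is a coroutine object and never equals None. A corrected sketch of the intended flow, reusing the repo's helpers (this is not the committed code):

# Hedged sketch of the cache-then-analyze flow, assuming the helpers shown
# elsewhere in this diff. Fixes the ordering and the missing await.
import hashlib
from fastapi import UploadFile

from Ars.ai_functions import extract_text_from_bytes, resume_analysis
from Ars.repositories import create_new_hashed_doc_entry, get_document_by_hashed_doc

def get_document_hash(document_content: str) -> str:
    # SHA-256 over a consistent UTF-8 encoding of the extracted text
    return hashlib.sha256(document_content.encode("utf-8")).hexdigest()

async def resilience_analysis(file: UploadFile, userId: str):
    contents = await file.read()
    resume_text = extract_text_from_bytes(pdf_bytes=contents)
    hashed_doc = get_document_hash(resume_text)  # hash after extraction, not before

    check = await get_document_by_hashed_doc(hashed_doc=hashed_doc)  # await the lookup
    if check is not None:
        return check  # cache hit: return the stored analysis

    await file.seek(0)  # rewind so resume_analysis can re-read the upload
    resume = await resume_analysis(file)
    # ... then run the calculate_*/generate_* pipeline, persist the result via
    # create_new_hashed_doc_entry, and return final_analysis_response as above.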
Ars/repositories.py
CHANGED
@@ -4,8 +4,54 @@ from fastapi import HTTPException
 from typing import Optional, List
 from motor.motor_asyncio import AsyncIOMotorDatabase
 from Ars.objects import UserResilienceScoreCreate, UserResilienceScoreOut,BreakDownByDomainCreate,BreakDownByDomainOut,FlaggedRiskAreasCreate,FlaggedRiskAreasOut,BoostSuggestionsCreate,BoostSuggestionsOut,BoostSuggestionsUpdate,UserResilienceScoreUpdate,FlaggedRiskAreasUpdate,BreakDownByDomainUpdate
-
-
+import pymongo
+
+async def create_new_hashed_doc_entry(data: dict):
+    # Ensure the 'hashed_doc' field is present in the input data
+    if "hashed_doc" not in data:
+        return {"error": "Missing 'hashed_doc' field in the data"}
+
+    try:
+        # Attempt to insert the document; 'hashed_doc' stays unique due to the index
+        result = await db.resume_resilience_analysis.insert_one(data)
+        return {"inserted_id": result.inserted_id}  # Return the inserted ID
+    except pymongo.errors.DuplicateKeyError:
+        # Handle the case where a document with the same 'hashed_doc' already exists
+        return {"error": "Document with this 'hashed_doc' already exists"}
+    except Exception as e:
+        # Catch other exceptions (e.g., database connection issues)
+        return {"error": str(e)}
+
+async def get_document_by_hashed_doc(hashed_doc: str):
+    try:
+        # Find the document using the hashed_doc field
+        document = await db.resume_resilience_analysis.find_one({"hashed_doc": hashed_doc})
+
+        if document:
+            return document['resume']
+        else:
+            return None
+
+    except Exception as e:
+        # Handle any other errors, like database issues
+        return {"error": str(e)}
+
+
+async def get_document_by_userId(userId: str):
+    try:
+        # Find the document using the userId field
+        document = await db.resume_resilience_analysis.find_one({"userId": userId})
+
+        if document:
+            return document['resume']
+        else:
+            return None
+
+    except Exception as e:
+        # Handle any other errors, like database issues
+        return {"error": str(e)}
+
+
 async def create_user_resilience( data: UserResilienceScoreCreate) -> UserResilienceScoreOut:
     """
     Create a new UserResilienceScore in the database.
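Note: these helpers use a db handle that is not defined in the hunk (presumably created elsewhere in the module), and the DuplicateKeyError branch only fires if resume_resilience_analysis actually has a unique index on hashed_doc; without one, MongoDB inserts duplicates silently. A minimal index-setup sketch, assuming Motor and placeholder connection names:

# Index-setup sketch; the URI default and database name are placeholders,
# not taken from this diff.
from motor.motor_asyncio import AsyncIOMotorClient

async def ensure_indexes(mongo_uri: str = "mongodb://localhost:27017") -> None:
    db = AsyncIOMotorClient(mongo_uri)["ars"]  # hypothetical database name
    # The unique index is what makes a duplicate insert raise
    # pymongo.errors.DuplicateKeyError instead of storing a second copy.
    await db.resume_resilience_analysis.create_index("hashed_doc", unique=True)

Note also that the helpers return {"error": ...} dicts rather than raising, so a caller like resilience_analysis cannot distinguish a lookup failure from a cached payload.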
Ars/routes.py
CHANGED
@@ -1,15 +1,31 @@
 import base64
-from fastapi import FastAPI, File, UploadFile,HTTPException
+from fastapi import FastAPI, File, UploadFile,HTTPException,Depends
 from Ars.controllers import resilience_analysis
+from Ars.repositories import get_document_by_userId
+from security.authDependency import verifyAccessToken
 from Ars.embedDoc import upsert_text_with_chunks,search_pinecone_text
+import hashlib
+
+
 ARS = FastAPI()
 
+
+
+
+
 @ARS.post("/risk-analysis")
-async def perform_risk_analysis(file: UploadFile = File(...)):
+async def perform_risk_analysis(user =Depends(verifyAccessToken) ,file: UploadFile = File(...), ):
     if file.content_type != "application/pdf":
         return HTTPException(status_code=400, detail={"error": "File must be a PDF."})
-
+
+    ResilienceScore = await resilience_analysis(file=file,userId=user['userId'])
     return ResilienceScore
 
 
 
+@ARS.get("/risk-analysis")
+async def perform_risk_analysis(user=Depends(verifyAccessToken) ):
+    ResilienceScore = await get_document_by_userId(userId=user['userId'])
+
+    return ResilienceScore
+
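Note: both handlers share the name perform_risk_analysis; FastAPI registers each route at decoration time, so both endpoints still work even though the second def shadows the first in module scope. The PDF check returns the HTTPException instead of raising it, which yields a 200 response with the exception serialized in the body, and import hashlib is unused in this file. A hedged client sketch for exercising the two endpoints, assuming a local deployment and a valid bearer token:

# Usage sketch with httpx; the base URL, token, and input file are placeholders.
import httpx

BASE_URL = "http://localhost:8000"  # hypothetical local deployment
HEADERS = {"Authorization": "Bearer YOUR_ACCESS_TOKEN"}  # placeholder token

with httpx.Client(base_url=BASE_URL, headers=HEADERS) as client:
    # POST runs the analysis (or replays the cached result) for an uploaded PDF
    with open("resume.pdf", "rb") as f:  # hypothetical input file
        created = client.post(
            "/risk-analysis",
            files={"file": ("resume.pdf", f, "application/pdf")},
        )
    # GET returns the stored analysis for the authenticated user
    fetched = client.get("/risk-analysis")
    print(created.json(), fetched.json())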
security/__init__.py
ADDED
File without changes
security/authDependency.py
ADDED
@@ -0,0 +1,14 @@
+from fastapi.security import HTTPBearer
+from fastapi import Depends
+from controller.imports import *
+
+token_auth_scheme = HTTPBearer()
+
+def verifyAccessToken(token:str =Depends(token_auth_scheme)):
+    credentials = token.credentials
+    decoded_user_id,decoded_access_token = decode_jwt(credentials)
+    is_valid = verify_access_token(db_uri=MONGO_URI, user_id=decoded_user_id, access_token=decoded_access_token)
+    if is_valid != True: # Example check
+        raise HTTPException(status_code=401, detail="Invalid token")
+    else:
+        return {"userId":decoded_user_id}