from Ars.core import r
from Ars.repositories import (
    get_document_by_hashed_doc_complete,
    get_document_by_hashed_doc,
    create_new_hashed_doc_entry,
    create_boost_suggestions,
    create_breakdown_by_domain,
    create_flagged_risk_areas,
    create_user_resilience,
)
from fastapi import UploadFile
from Ars.ai_functions import (
    extract_text_from_bytes,
    resume_analysis,
    calculate_automation_risk,
    calculate_Ai_collab_readiness,
    calculate_skill_depth,
    generate_boost_suggestion,
    generate_domain_breakdown,
    generate_flagged_risk_areas,
)
from Ars.objects import AICollabReadiness, SkillDepthResult, AutomationRiskResult
import hashlib

def get_document_hash(document_content: str) -> str:
    # Ensure consistent encoding (e.g., UTF-8) before hashing
    document_bytes = document_content.encode('utf-8')
    hasher = hashlib.sha256()
    hasher.update(document_bytes)
    return hasher.hexdigest()
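
# A minimal usage sketch (hypothetical input): the digest is deterministic,
# so re-uploading the same resume text yields the same key and can be served
# from the cache below instead of triggering a fresh analysis.
#
#     sample = "Jane Doe\nSenior Data Engineer\n..."
#     assert get_document_hash(sample) == get_document_hash(sample)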

async def resilience_analysis(file: UploadFile, userId: str):
    contents = await file.read()
    resume_text = extract_text_from_bytes(pdf_bytes=contents)
    hashed_doc = get_document_hash(resume_text)

    # Cache lookup: skip the expensive AI pipeline if this exact document
    # has already been analysed.
    check = await get_document_by_hashed_doc(hashed_doc=hashed_doc)
    if check is None:
        # Fresh document: run the full analysis pipeline.
        resume = await resume_analysis(contents)
        risk = calculate_automation_risk(resume)
        risk = AutomationRiskResult(**risk.model_dump())
        skill_depth = calculate_skill_depth(resume)
        skill_depth = SkillDepthResult(**skill_depth.model_dump())
        ai_readiness = calculate_Ai_collab_readiness(resume)
        ai_readiness = AICollabReadiness(**ai_readiness.model_dump())
        # Weighted blend: 50% inverted automation risk, 30% skill depth,
        # 20% AI-collaboration readiness; each component is on a 0-100 scale.
        resilience_score = (
            (1 - (risk.result / 100)) * 0.5
            + (skill_depth.result / 100) * 0.3
            + (ai_readiness.result / 100) * 0.2
        )
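        # Worked example (illustrative numbers, not from the source): with
        # risk.result = 40, skill_depth.result = 70, ai_readiness.result = 60,
        # the score is (1 - 0.40) * 0.5 + 0.70 * 0.3 + 0.60 * 0.2
        #            = 0.30 + 0.21 + 0.12 = 0.63.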
        flagged_risk = generate_flagged_risk_areas(resume=resume, skil_depth=skill_depth, risk=risk, ai_readiness=ai_readiness)
        boost_suggestion = generate_boost_suggestion(resume=resume, skil_depth=skill_depth, risk=risk, ai_readiness=ai_readiness)
        domain_breakdown = generate_domain_breakdown(resume=resume, skil_depth=skill_depth, risk=risk, ai_readiness=ai_readiness)
        final_analysis_response = {
            "overall score": resilience_score,
            "flagged Risk": flagged_risk.model_dump(),
            "boost suggestion": boost_suggestion.model_dump(),
            "domain breakdown": domain_breakdown.model_dump(),
            "resume": resume.model_dump(),
            "skil_depth": skill_depth.model_dump(),
            "risk": risk.model_dump(),
            "ai_readiness": ai_readiness.model_dump(),
        }
        # Persist under the document hash so the same resume is served from
        # cache next time.
        await create_new_hashed_doc_entry(data={"hashed_doc": hashed_doc, "resume": final_analysis_response, "userId": userId})
        return final_analysis_response
    else:
        result_hash = await get_document_by_hashed_doc_complete(hashed_doc)
        if result_hash['hashed_doc'] == hashed_doc and result_hash['userId'] == userId:
            # Same user re-uploaded the same document: return the cached
            # analysis without writing a duplicate entry.
            return check
        # A different user uploaded an already-analysed document: link the
        # cached analysis to this user, then return it.
        await create_new_hashed_doc_entry(data={"hashed_doc": hashed_doc, "resume": check, "userId": userId})
        return check
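
# Usage sketch (hypothetical wiring; the real router and endpoint name may
# differ elsewhere in Ars):
#
#     from fastapi import APIRouter, UploadFile
#
#     router = APIRouter()
#
#     @router.post("/resilience")
#     async def analyse_resume(file: UploadFile, userId: str):
#         return await resilience_analysis(file=file, userId=userId)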