# franky-v1/src/routers/discovery.py
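"""FastAPI router exposing the Self-Discovery user-intent interview workflow.

Provides a health-check endpoint and an /interview/ endpoint that runs the
SelfDiscoverWorkflow on a user query and asks the JudgeWorkflow whether the
gathered intent is complete.
"""
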
from fastapi import APIRouter, HTTPException

from llama_index.core.settings import Settings

from src.models.schemas import IntentRequestData, IntentResponseData
from src.workflows.workflow_discovery import SelfDiscoverWorkflow, JudgeWorkflow
from src.workflows.reasoning_modules import ML_EXPERT_PROMPT_TEMPLATE

# router configuration
router = APIRouter()
@router.get("/")
async def read_root():
return "Self-Discovery Workflow for User Intent Interview is up!"
@router.post("/interview/", response_model=IntentResponseData)
async def interview_user(data: IntentRequestData):
try:
interview_workflow = SelfDiscoverWorkflow()
task = ML_EXPERT_PROMPT_TEMPLATE.format(query=data.query)
workflow_handler = interview_workflow.run(task=task, llm=Settings._llm)
intermediate_result = await workflow_handler
context = await workflow_handler.ctx.get("workflow_result")
judge_workflow = JudgeWorkflow()
completion_status = await judge_workflow.run(judging_context=intermediate_result, llm=Settings._llm)
return IntentResponseData(context=str(context),
result=intermediate_result,
count=data.count + 1,
complete=completion_status)
except Exception as e:
return {"detail": f"Error processing {e}"}