franky-v1/src/workflows/graph_workflow.py
import asyncio

from llama_index.core.workflow import (
    Context,
    StartEvent,
    StopEvent,
    Workflow,
    step,
)

from src.models.workflow_graph import ConstructGraphEvent, GetResponseEvent
from src.utils.helper import helper
from src.utils.settings import settings


class DesignGraphWorkflow(Workflow):
    """Three-step workflow: prompt the LLM, parse the response into a graph, persist and return it."""
    @step
    async def GetRawWorkflow(self, ctx: Context, ev: StartEvent) -> GetResponseEvent:
        """Build a prompt from the project description and available modules, then query the LLM."""
        _project_description = ev._project_description
        llm = ev.llm
        modules = settings.moduleList

        prompt = helper._build_prompt(
            project_desc=_project_description,
            modules=modules
        )

        try:
            response = await llm.acomplete(prompt)
        except Exception as e:
            # Re-raise as an exception with context; raising a bare string is a TypeError.
            raise RuntimeError(f"Error while querying the LLM: {e}") from e

        return GetResponseEvent(project_details=_project_description, rawResponse=response.text)

    @step
    async def ExtractGraph(self, ctx: Context, ev: GetResponseEvent) -> ConstructGraphEvent:
        """Parse the raw LLM response into a workflow graph structure."""
        raw_llm_response = ev.rawResponse
        graph = helper._parse_llm_response(raw_response=raw_llm_response)
        return ConstructGraphEvent(workflowGraph=graph)

    @step
    async def exportGraph(self, ctx: Context, ev: ConstructGraphEvent) -> StopEvent:
        """Persist the graph and return it as the workflow result."""
        graph = ev.workflowGraph
        helper._store_graph(graph_data=graph)
        return StopEvent(result=graph)
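

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal way to drive the workflow end to end, assuming an OpenAI-backed
# LLM from llama_index and a free-form project description string. The keyword
# names passed to run() must match the attributes read off StartEvent in
# GetRawWorkflow (_project_description, llm); the model name and description
# below are hypothetical.
if __name__ == "__main__":
    from llama_index.llms.openai import OpenAI  # assumed LLM backend

    async def _demo() -> None:
        llm = OpenAI(model="gpt-4o-mini")  # hypothetical model choice
        workflow = DesignGraphWorkflow(timeout=120, verbose=True)
        # run() kwargs are surfaced on the StartEvent consumed by GetRawWorkflow.
        graph = await workflow.run(
            _project_description="Build an ETL pipeline for sensor telemetry",
            llm=llm,
        )
        print(graph)

    asyncio.run(_demo())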