from __future__ import annotations

from llm_engineering.domain.inference import Inference


class InferenceExecutor:
    """Runs a single query (with optional context) through an `Inference` client."""

    def __init__(
        self,
        llm: Inference,
        query: str,
        context: str | None = None,
    ) -> None:
        self.llm = llm
        self.query = query
        self.context = context if context else ""

    def execute(self) -> str:
        # Attach the query and (possibly empty) context to the LLM payload,
        # then run inference and return the generated answer.
        self.llm.set_payload(
            query=self.query,
            context=self.context,
        )
        answer = self.llm.inference()

        return answer
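

# --- Usage sketch (illustrative only, not part of the module) ----------------
# A hedged example of how this executor might be driven. `_EchoLLM` is a
# hypothetical stand-in that only duck-types the two methods the executor
# relies on (`set_payload` and `inference`); a real deployment would pass a
# concrete `Inference` implementation instead.
if __name__ == "__main__":

    class _EchoLLM:
        """Hypothetical stub that echoes its payload instead of calling a model."""

        def set_payload(self, query: str, context: str | None = None) -> None:
            self._query = query
            self._context = context or ""

        def inference(self) -> str:
            return f"(stub) answer for query: {self._query!r}"

    executor = InferenceExecutor(llm=_EchoLLM(), query="What is an inference pipeline?")
    print(executor.execute())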