import json
from uuid import uuid4
from typing import List, Any, Optional, Dict, Union, Iterator

from pydantic import BaseModel, ConfigDict, field_validator, Field

from phi.assistant import Assistant


class Task(BaseModel):
    # -*- Task settings
    # Task name
    name: Optional[str] = None
    # Task UUID (autogenerated if not set)
    task_id: Optional[str] = Field(None, validate_default=True)
    # Task description
    description: Optional[str] = None

    # Assistant to run this task
    assistant: Optional[Assistant] = None
    # Reviewer for this task. Set reviewer=True for a default reviewer
    reviewer: Optional[Union[Assistant, bool]] = None

    # -*- Task Output
    # Final output of this Task
    output: Optional[Any] = None
    # If True, shows the output of the task in the workflow.run() function
    show_output: bool = True
    # Save the output to a file
    save_output_to_file: Optional[str] = None

    # Cached values: do not set these directly
    _assistant: Optional[Assistant] = None

    model_config = ConfigDict(arbitrary_types_allowed=True)

    @field_validator("task_id", mode="before")
    def set_task_id(cls, v: Optional[str]) -> str:
        return v if v is not None else str(uuid4())

    @property
    def streamable(self) -> bool:
        return self.get_assistant().streamable

    def get_task_output_as_str(self) -> Optional[str]:
        if self.output is None:
            return None

        if isinstance(self.output, str):
            return self.output

        if issubclass(self.output.__class__, BaseModel):
            # Serialize the output to JSON if it is a BaseModel
            return self.output.model_dump_json(exclude_none=True, indent=2)

        try:
            return json.dumps(self.output, indent=2)
        except Exception:
            return str(self.output)

    def get_assistant(self) -> Assistant:
        if self._assistant is None:
            self._assistant = self.assistant or Assistant()
        return self._assistant

    def _run(
        self,
        message: Optional[Union[List, Dict, str]] = None,
        *,
        stream: bool = True,
        **kwargs: Any,
    ) -> Iterator[str]:
        assistant = self.get_assistant()
        # Attach the task description to the assistant before running it
        assistant.task = self.description

        # Accumulate the full response, optionally streaming chunks to the caller
        assistant_output = ""
        if stream and self.streamable:
            for chunk in assistant.run(message=message, stream=True, **kwargs):
                assistant_output += chunk if isinstance(chunk, str) else ""
                if self.show_output:
                    yield chunk if isinstance(chunk, str) else ""
        else:
            # Non-streaming: the assistant returns the complete response in one call
            assistant_output = assistant.run(message=message, stream=False, **kwargs)  # type: ignore

        self.output = assistant_output
        if self.save_output_to_file:
            # Resolve optional {name} and {task_id} placeholders in the file path
            fn = self.save_output_to_file.format(name=self.name, task_id=self.task_id)
            with open(fn, "w") as f:
                # Serialize non-string outputs (e.g. a BaseModel) before writing
                f.write(self.get_task_output_as_str() or "")

        # -*- Yield task output if not streaming
        if not stream:
            if self.show_output:
                yield self.output
            else:
                yield ""

    def run(
        self,
        message: Optional[Union[List, Dict, str]] = None,
        *,
        stream: bool = True,
        **kwargs: Any,
    ) -> Union[Iterator[str], str, BaseModel]:
        # Stream only when requested and the underlying assistant supports it;
        # otherwise drain the generator and return the single accumulated result.
        if stream and self.streamable:
            resp = self._run(message=message, stream=True, **kwargs)
            return resp
        else:
            resp = self._run(message=message, stream=False, **kwargs)
            return next(resp)
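

# ---------------------------------------------------------------------------
# Hypothetical usage sketch (not part of the original module). It assumes an
# Assistant wired to a configured LLM provider; the name "summarize" and the
# output file pattern are illustrative only. Shown to clarify the difference
# between the streaming (iterator of chunks) and non-streaming (single value)
# return types of Task.run().
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    task = Task(
        name="summarize",
        description="Summarize the input text in one sentence.",
        assistant=Assistant(),
        save_output_to_file="{name}_{task_id}.txt",
    )

    # Streaming: Task.run() returns an iterator that yields chunks as they arrive
    for chunk in task.run("Phidata assistants wrap LLMs with tools and memory.", stream=True):
        print(chunk, end="")

    # Non-streaming: Task.run() returns the full output as a str (or BaseModel)
    print(task.run("Phidata assistants wrap LLMs with tools and memory.", stream=False))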