from typing import Any, Optional, Dict, List, Union, Callable

from pydantic import BaseModel, ConfigDict

from phi.assistant.openai.run import Run
from phi.assistant.openai.message import Message
from phi.assistant.openai.assistant import OpenAIAssistant
from phi.assistant.openai.exceptions import ThreadIdNotSet
from phi.utils.log import logger

try:
    from openai import OpenAI
    from openai.types.beta.assistant import Assistant as OpenAIAssistantType
    from openai.types.beta.thread import Thread as OpenAIThread
    from openai.types.beta.thread_deleted import ThreadDeleted as OpenAIThreadDeleted
except ImportError:
    logger.error("`openai` not installed")
    raise


class Thread(BaseModel):
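    """Wraps a thread in the OpenAI Assistants API.

    Minimal usage sketch (assumes an already-configured `OpenAIAssistant` instance
    named `assistant` and a valid OpenAI API key in the environment):

        thread = Thread()
        thread.create()
        thread.run(message="Hello", assistant=assistant, wait=True)
        thread.print_messages()
    """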
    # -*- Thread settings
    # Thread id which can be referenced in API endpoints.
    id: Optional[str] = None
    # The object type, populated by the API. Always thread.
    object: Optional[str] = None

    # OpenAIAssistant used for this thread
    assistant: Optional[OpenAIAssistant] = None
    # The ID of the assistant for this thread.
    assistant_id: Optional[str] = None

    # Set of 16 key-value pairs that can be attached to an object.
    # This can be useful for storing additional information about the object in a structured format.
    # Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
    metadata: Optional[Dict[str, Any]] = None

    # True if this thread is active
    is_active: bool = True
    # The Unix timestamp (in seconds) for when the thread was created.
    created_at: Optional[int] = None

    openai: Optional[OpenAI] = None
    openai_thread: Optional[OpenAIThread] = None
    openai_assistant: Optional[OpenAIAssistantType] = None

    model_config = ConfigDict(arbitrary_types_allowed=True)

    @property
    def client(self) -> OpenAI:
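        # Use the explicitly provided client if set; otherwise fall back to a default OpenAI() client.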
        return self.openai or OpenAI()

    @property
    def messages(self) -> List[Message]:
        # Returns a list of messages in this thread.
        try:
            return self.get_messages()
        except ThreadIdNotSet:
            return []

    def load_from_openai(self, openai_thread: OpenAIThread):
        self.id = openai_thread.id
        self.object = openai_thread.object
        self.created_at = openai_thread.created_at
        self.openai_thread = openai_thread

    def create(self, messages: Optional[List[Union[Message, Dict]]] = None) -> "Thread":
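        # Build the request body from the given messages (Message objects or raw dicts) and optional metadata.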
        request_body: Dict[str, Any] = {}
        if messages is not None:
            _messages = []
            for _message in messages:
                if isinstance(_message, Message):
                    _messages.append(_message.to_dict())
                else:
                    _messages.append(_message)
            request_body["messages"] = _messages
        if self.metadata is not None:
            request_body["metadata"] = self.metadata

        self.openai_thread = self.client.beta.threads.create(**request_body)
        self.load_from_openai(self.openai_thread)
        logger.debug(f"Thread created: {self.id}")
        return self

    def get_id(self) -> Optional[str]:
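        # Prefer the locally set id; fall back to the cached OpenAI thread, if one has been loaded.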
        return self.id or (self.openai_thread.id if self.openai_thread else None)

    def get_from_openai(self) -> OpenAIThread:
        _thread_id = self.get_id()
        if _thread_id is None:
            raise ThreadIdNotSet("Thread.id not set")

        self.openai_thread = self.client.beta.threads.retrieve(
            thread_id=_thread_id,
        )
        self.load_from_openai(self.openai_thread)
        return self.openai_thread

    def get(self, use_cache: bool = True) -> "Thread":
        if self.openai_thread is not None and use_cache:
            return self

        self.get_from_openai()
        return self

    def get_or_create(self, use_cache: bool = True, messages: Optional[List[Union[Message, Dict]]] = None) -> "Thread":
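        # Return the existing thread if an id is available; otherwise create a new thread seeded with the given messages.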
        try:
            return self.get(use_cache=use_cache)
        except ThreadIdNotSet:
            return self.create(messages=messages)

    def update(self) -> "Thread":
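        # Refresh the thread from OpenAI, then push the current metadata via the update endpoint.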
        try:
            thread_to_update = self.get_from_openai()
            if thread_to_update is not None:
                request_body: Dict[str, Any] = {}
                if self.metadata is not None:
                    request_body["metadata"] = self.metadata

                self.openai_thread = self.client.beta.threads.update(
                    thread_id=thread_to_update.id,
                    **request_body,
                )
                self.load_from_openai(self.openai_thread)
                logger.debug(f"Thead updated: {self.id}")
                return self
            raise ValueError("Thread not available")
        except ThreadIdNotSet:
            logger.warning("Thread not available")
            raise

    def delete(self) -> OpenAIThreadDeleted:
        try:
            thread_to_delete = self.get_from_openai()
            if thread_to_delete is not None:
                deletion_status = self.client.beta.threads.delete(
                    thread_id=thread_to_delete.id,
                )
                logger.debug(f"Thread deleted: {self.id}")
                return deletion_status
            raise ValueError("Thread not available")
        except ThreadIdNotSet:
            logger.warning("Thread not available")
            raise

    def add_message(self, message: Union[Message, Dict]) -> None:
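        # Coerce dicts into Message objects, attach this thread's id, and create the message on OpenAI.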
        try:
            message = message if isinstance(message, Message) else Message(**message)
        except Exception as e:
            logger.error(f"Error creating Message: {e}")
            raise
        message.thread_id = self.id
        message.create()

    def add(self, messages: List[Union[Message, Dict]]) -> None:
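        # Append to the existing thread if it already has an id; otherwise create the thread with these messages.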
        existing_thread = self.get_id() is not None
        if existing_thread:
            for message in messages:
                self.add_message(message=message)
        else:
            self.create(messages=messages)

    def run(
        self,
        message: Optional[Union[str, Message]] = None,
        assistant: Optional[OpenAIAssistant] = None,
        assistant_id: Optional[str] = None,
        run: Optional[Run] = None,
        wait: bool = True,
        callback: Optional[Callable] = None,
    ) -> Run:
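        # Add the message (if provided) to the thread, resolve the thread id, then delegate execution to a Run.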
        if message is not None:
            if isinstance(message, str):
                message = Message(role="user", content=message)
            self.add(messages=[message])

        try:
            _thread_id = self.get_id()
            if _thread_id is None:
                _thread_id = self.get_from_openai().id
        except ThreadIdNotSet:
            logger.error("Thread not available")
            raise

        _assistant = assistant or self.assistant
        _assistant_id = assistant_id or self.assistant_id

        _run = run or Run()
        return _run.run(
            thread_id=_thread_id, assistant=_assistant, assistant_id=_assistant_id, wait=wait, callback=callback
        )

    def get_messages(self) -> List[Message]:
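        # Resolve the thread id and list this thread's messages from the OpenAI API as phi Message objects.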
        try:
            _thread_id = self.get_id()
            if _thread_id is None:
                _thread_id = self.get_from_openai().id
        except ThreadIdNotSet:
            logger.warning("Thread not available")
            raise

        thread_messages = self.client.beta.threads.messages.list(
            thread_id=_thread_id,
        )
        return [Message.from_openai(message=message) for message in thread_messages]

    def to_dict(self) -> Dict[str, Any]:
        return self.model_dump(exclude_none=True, include={"id", "object", "messages", "metadata"})

    def pprint(self):
        """Pretty print using rich"""
        from rich.pretty import pprint

        pprint(self.to_dict())

    def print_messages(self) -> None:
        from rich.table import Table
        from rich.box import ROUNDED
        from rich.markdown import Markdown
        from phi.cli.console import console

        # Get the messages from the thread
        messages = self.get_messages()

        # Print the response
        table = Table(
            box=ROUNDED,
            border_style="blue",
            expand=True,
        )
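        # Iterate over the messages in reverse; user messages supply the table header columns,
        # while assistant and other messages are added as rows.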
        for m in messages[::-1]:
            if m.role == "user":
                table.add_column("User")
                table.add_column(m.get_content_with_files())
            elif m.role == "assistant":
                table.add_row("OpenAIAssistant", Markdown(m.get_content_with_files()))
                table.add_section()
            else:
                table.add_row(m.role, Markdown(m.get_content_with_files()))
                table.add_section()
        console.print(table)

    def print_response(
        self, message: str, assistant: OpenAIAssistant, current_message_only: bool = False, markdown: bool = False
    ) -> None:
        from rich.progress import Progress, SpinnerColumn, TextColumn

        with Progress(SpinnerColumn(spinner_name="dots"), TextColumn("{task.description}"), transient=True) as progress:
            progress.add_task("Working...")
            self.run(
                message=message,
                assistant=assistant,
                wait=True,
            )

        if current_message_only:
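            # self.messages is expected to be newest-first: collect assistant replies until the
            # user message that triggered this run is reached.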
            response_messages = []
            for m in self.messages:
                if m.role == "assistant":
                    response_messages.append(m)
                elif m.role == "user" and m.get_content_text() == message:
                    break

            total_messages = len(response_messages)
            for idx, response_message in enumerate(response_messages[::-1], start=1):
                response_message.pprint(
                    title=f"[bold] :robot: OpenAIAssistant ({idx}/{total_messages}) [/bold]", markdown=markdown
                )
        else:
            for m in self.messages[::-1]:
                m.pprint(markdown=markdown)

    def __str__(self) -> str:
        import json

        return json.dumps(self.to_dict(), indent=4)