Commit 35cb430
1 Parent(s): 65e70fd
refactor: message formatting in LLMClient.execute_mistral_sdk
medrag_multi_modal/assistant/llm_client.py CHANGED
@@ -57,17 +57,22 @@ class LLMClient(weave.Model):
             [system_prompt] if isinstance(system_prompt, str) else system_prompt
         )
         user_prompt = [user_prompt] if isinstance(user_prompt, str) else user_prompt
-
+        system_messages = [{"type": "text", "text": prompt} for prompt in system_prompt]
+        user_messages = []
         for prompt in user_prompt:
             if isinstance(prompt, Image.Image):
-
+                user_messages.append(
                     {
                         "type": "image_url",
                         "image_url": base64_encode_image(prompt, "image/png"),
                     }
                 )
             else:
-
+                user_messages.append({"type": "text", "text": prompt})
+        messages = [
+            {"role": "system", "content": system_messages},
+            {"role": "user", "content": user_messages},
+        ]
 
         client = Mistral(api_key=os.environ.get("MISTRAL_API_KEY"))
         client = instructor.from_mistral(client)
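The image branch of the refactored loop still calls the repository's base64_encode_image helper, whose body is not part of this diff. A minimal sketch of what such a helper typically does, assuming it returns a base64 data URL that Mistral's vision endpoint accepts as an image_url value:

import base64
import io

from PIL import Image


def base64_encode_image(image: Image.Image, mimetype: str) -> str:
    # Serialize the PIL image into an in-memory buffer using the format
    # implied by the mimetype (e.g. "image/png" -> "PNG").
    buffer = io.BytesIO()
    image.save(buffer, format=mimetype.split("/")[-1].upper())
    encoded = base64.b64encode(buffer.getvalue()).decode("utf-8")
    # Wrap the payload in a data URL suitable as an image_url value.
    return f"data:{mimetype};base64,{encoded}"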
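Downstream, the refactor leaves execute_mistral_sdk with role-scoped system_messages and user_messages lists wrapped into a single messages payload for the instructor-patched Mistral client. A minimal usage sketch, assuming instructor's standard chat.completions.create interface; the Answer response model and the model name are illustrative assumptions that do not appear in this diff:

import os

import instructor
from mistralai import Mistral
from pydantic import BaseModel


class Answer(BaseModel):
    # Hypothetical structured-output schema; the repository's actual
    # response models are not shown in this diff.
    text: str


# Role-scoped message dicts, shaped like the ones the refactored method builds.
messages = [
    {"role": "system", "content": [{"type": "text", "text": "You are a medical assistant."}]},
    {"role": "user", "content": [{"type": "text", "text": "Summarize the attached page."}]},
]

client = Mistral(api_key=os.environ.get("MISTRAL_API_KEY"))
client = instructor.from_mistral(client)

# instructor validates the completion against the given response_model;
# the model name below is an assumption for illustration only.
answer = client.chat.completions.create(
    model="pixtral-12b-2409",
    response_model=Answer,
    messages=messages,
)
print(answer.text)

Splitting system_messages from user_messages keeps text-only system prompts and potentially multimodal user prompts under their own roles, which is exactly what the refactor's comprehension and append loop produce.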