propilot-calling-functions / chat_settings.py
cadasme's picture
feat: included custom helicone params
faedf96
raw
history blame
2.29 kB
import openai
import os
from calling_functions import (
ADD_DECIMAL_AND_HEXADECIMAL_FUNCTION_SCHEMA,
add_decimal_values, # noqa
add_hexadecimal_values, # noqa
)
# Chat helper functions
def get_initial_message():
    """Build the seed conversation for the chat UI.

    Returns:
        A list containing a single system message (in Spanish) that
        introduces ProPilot and suggests the function-calling demo question.
    """
    system_prompt = {
        "role": "system",
        "content": "Hola, soy ProPilot. Si deseas probar el function calling que tengo configurado solo pregunta: Cual es la suma de 24 y el valor hexadecimal F?",
    }
    return [system_prompt]
def get_chatgpt_response(messages, model):
    """Drive a chat-completion loop against the OpenAI API (routed through
    the Helicone proxy), resolving model-requested function calls until the
    model produces a final text answer.

    Args:
        messages: Mutable list of chat message dicts ({"role", "content"}).
        model: Name of the OpenAI chat model to use.

    Returns:
        The assistant's final text content (when finish_reason == "stop").
    """
    # Route every request through the Helicone observability proxy.
    openai.api_base = "https://oai.hconeai.com/v1"
    HELICONE_API_KEY = os.getenv("HELICONE_API_KEY")
    # Stringified results of each resolved function call, in order.
    intermediate_results = []
    while True:
        response = openai.ChatCompletion.create(
            model=model,
            messages=messages,
            functions=ADD_DECIMAL_AND_HEXADECIMAL_FUNCTION_SCHEMA,
            temperature=0,
            # Extra HTTP headers forwarded to Helicone: auth, response
            # caching, and custom properties for request tagging.
            headers={
                "Helicone-Auth": f"Bearer {HELICONE_API_KEY}",
                "Helicone-Cache-Enabled": "true",
                "Helicone-Property-App": "HuggingFaceProPilot",
                "Helicone-Property-DataSource": "FunctionsCallingDemo",
            }
        )
        if response.choices[0]["finish_reason"] == "stop":
            # Model is done: return its plain-text answer.
            final_answer = response.choices[0]["message"]["content"]
            return final_answer
        elif response.choices[0]["finish_reason"] == "function_call":
            fn_name = response.choices[0]["message"]["function_call"]["name"]
            arguments = response.choices[0]["message"]["function_call"]["arguments"]
            # Dynamic dispatch: look the function up by name in this module's
            # globals (add_decimal_values / add_hexadecimal_values are
            # imported above for exactly this purpose).
            function = globals()[fn_name]
            # NOTE(review): `arguments` is the raw JSON string returned by the
            # API; it is passed through un-parsed, so the called function is
            # presumably expected to json-decode it itself — confirm against
            # calling_functions.
            result = function(arguments)
            # Unwrap a {"result": ...} payload if the function returned one.
            if isinstance(result, dict) and "result" in result:
                result = result["result"]
            intermediate_results.append(str(result))
            # NOTE(review): this drops the last len(intermediate_results)
            # messages before appending the newest result as a system message
            # — on later iterations that also removes earlier conversation
            # entries, not just previously appended results; verify intended.
            # Remove intermediate results from the messages
            messages = messages[:-len(intermediate_results)]
            # Append the final answer as a system message
            messages.append(
                {
                    "role": "system",
                    "content": intermediate_results[-1]
                }
            )
def update_chat(messages, role, content):
    """Append a new chat entry to *messages* in place.

    Args:
        messages: Mutable list of chat message dicts.
        role: Speaker role for the new entry (e.g. "user", "assistant").
        content: Text of the new entry.

    Returns:
        The same *messages* list, now including the appended entry.
    """
    entry = {"role": role, "content": content}
    messages.append(entry)
    return messages