import openai
import os

from calling_functions import (
    ADD_DECIMAL_AND_HEXADECIMAL_FUNCTION_SCHEMA,
    add_decimal_values,  # noqa
    add_hexadecimal_values,  # noqa
)
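
# The function schema and the two adder helpers are defined in
# calling_functions; the helpers are imported (hence the noqa markers) so
# they can be resolved by name via globals() when the model requests a call.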

# Chat helper function definitions
def get_initial_message():
    messages = [
        {
            "role": "system",
            "content": "Hi, I'm ProPilot. If you want to try the function calling I have set up, just ask: What is the sum of 24 and the hexadecimal value F?",
        },
    ]
    return messages


def get_chatgpt_response(messages, model):
    """Call the ChatCompletion API, executing any requested function calls
    until the model returns a final answer."""
    intermediate_results = []

    while True:
        response = openai.ChatCompletion.create(
            model=model,
            messages=messages,
            functions=ADD_DECIMAL_AND_HEXADECIMAL_FUNCTION_SCHEMA,
            temperature=0,
            headers={
                "Helicone-Auth": f"Bearer {os.getenv('HELICONE_API_KEY')}",
                "Helicone-Cache-Enabled": "true",
            }
        )

        if response.choices[0]["finish_reason"] == "stop":
            final_answer = response.choices[0]["message"]["content"]
            return final_answer

        elif response.choices[0]["finish_reason"] == "function_call":
            fn_name = response.choices[0]["message"]["function_call"]["name"]
            arguments = response.choices[0]["message"]["function_call"]["arguments"]

            # The model returns the arguments as a JSON-encoded string; the
            # matching helper from calling_functions is looked up by name and
            # called with that string directly.
            function = globals()[fn_name]
            result = function(arguments)

            if isinstance(result, dict) and "result" in result:
                result = result["result"]

            intermediate_results.append(str(result))

            # Drop the previously appended intermediate system message (if
            # any) so only the latest function result stays in the chat.
            if len(intermediate_results) > 1:
                messages = messages[:-1]

            # Append the latest function result as a system message and loop
            # again so the model can turn it into the final answer.
            messages.append(
                {
                    "role": "system",
                    "content": intermediate_results[-1],
                }
            )

        else:
            # Any other finish reason (e.g. "length"): return whatever content
            # is available instead of looping forever.
            return response.choices[0]["message"].get("content", "")


def update_chat(messages, role, content):
    messages.append(
        {"role": role, "content": content},
    )
    return messages
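

if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module): wires the
    # helpers above together. The model name is an assumption; any chat
    # model that supports function calling should work. Requires
    # OPENAI_API_KEY (and optionally HELICONE_API_KEY) in the environment.
    chat = get_initial_message()
    chat = update_chat(
        chat, "user", "What is the sum of 24 and the hexadecimal value F?"
    )
    answer = get_chatgpt_response(chat, model="gpt-3.5-turbo-0613")
    print(answer)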