RickyIG committed
Commit 0ae9866 · Parent: 4327d80

Update app.py

Files changed (1)
app.py +7 -6
app.py CHANGED
@@ -11,7 +11,11 @@ from IPython import get_ipython
 # from termcolor import colored # doesn't actually work in Colab ¯\_(ツ)_/¯
 
 GPT_MODEL = "gpt-3.5-turbo-1106"
-openai.api_key = os.environ['OPENAI_API_KEY']
+
+from google.colab import userdata
+openai.api_key = userdata.get('OPENAI_API_KEY')
+
+messages=[]
 
 def exec_python(cell):
     ipython = get_ipython()
@@ -23,7 +27,6 @@ def exec_python(cell):
         log += f"\n{result.error_in_exec}"
     prompt = """You are a genius math tutor, Python code expert, and a helpful assistant.
     answer = {ans}
-
     Please answer user questions very well with explanations and match it with the multiple choices question.
     """.format(ans = log)
     return log
@@ -98,7 +101,6 @@ def openai_api_calculate_cost(usage,model=GPT_MODEL):
 def chat_completion_request(messages, functions=None, function_call=None, model=GPT_MODEL):
     """
     This function sends a POST request to the OpenAI API to generate a chat completion.
-
     Parameters:
     - messages (list): A list of message objects. Each object should have a 'role' (either 'system', 'user', or 'assistant') and 'content'
       (the content of the message).
@@ -106,7 +108,6 @@ def chat_completion_request(messages, functions=None, function_call=None, model=
     - function_call (str or dict, optional): If it's a string, it can be either 'auto' (the model decides whether to call a function) or 'none'
       (the model will not call a function). If it's a dict, it should describe the function to call.
     - model (str): The ID of the model to use.
-
     Returns:
     - response (requests.Response): The response from the OpenAI API. If the request was successful, the response's JSON will contain the chat completion.
     """
@@ -189,7 +190,7 @@ def function_call_process(assistant_message):
 
     # print(result)
 
-def second_call(prompt):
+def second_call(prompt, result, function_name = "exec_python"):
     # Add a new message to the conversation with the function result
     messages.append({
         "role": "function",
@@ -216,7 +217,7 @@ def main_function(init_prompt, prompt, user_input):
     first_call_result, cost1 = first_call(init_prompt, user_input)
     function_call_process_result = function_call_process(first_call_result)
     second_prompt_build_result = second_prompt_build(prompt, function_call_process_result)
-    second_call_result, cost2 = second_call(second_prompt_build_result)
+    second_call_result, cost2 = second_call(second_prompt_build_result, function_call_process_result)
     return first_call_result, function_call_process_result, second_call_result, cost1, cost2
 
 def gradio_function():
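
The first change swaps the key source: `os.environ['OPENAI_API_KEY']` is replaced by Colab's secret store via `google.colab.userdata`. A minimal sketch of what that swap amounts to, with a fallback to the old environment-variable path for running outside Colab (the fallback is an assumption, not part of this commit):

```python
# Sketch only. The commit reads the key from Colab's secret store; the
# try/except fallback to the previous environment-variable path is an
# assumption for running the same file outside Colab.
import os
import openai

try:
    from google.colab import userdata   # only importable inside Google Colab
    openai.api_key = userdata.get('OPENAI_API_KEY')
except ImportError:
    openai.api_key = os.environ['OPENAI_API_KEY']   # pre-commit behaviour
```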
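
The other change widens `second_call` to take the function result and a function name, and `main_function` now passes `function_call_process_result` through. Only the new signature and the start of the `messages.append` call are visible in the diff; the sketch below shows the function-result message shape those extra arguments are presumably used to build (the helper name is illustrative and does not exist in `app.py`):

```python
# Illustrative helper (not in app.py): builds the message that reports a tool's
# output back to the model in the OpenAI function-calling chat format.
def append_function_result(messages, function_name, result):
    messages.append({
        "role": "function",        # marks this message as a function result
        "name": function_name,     # e.g. "exec_python", the new default
        "content": str(result),    # output produced by the executed code
    })

# Roughly matching the updated call site in main_function:
# append_function_result(messages, "exec_python", function_call_process_result)
```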