eagle0504 committed
Commit c66249c · verified · 1 Parent(s): e4026af

Update helper/utils.py

Files changed (1)
  1. helper/utils.py +38 -6
helper/utils.py CHANGED
@@ -52,11 +52,6 @@ def current_year():
     # return [text_list, sources_list]
 
 
-from typing import List, Tuple
-
-import PyPDF2
-
-
 def read_and_textify(
     files: List[str], chunk_size: int = 2  # Default chunk size set to 50
 ) -> Tuple[List[str], List[str]]:
@@ -134,6 +129,43 @@ def list_to_nums(sentences: List[str]) -> List[List[float]]:
     return embeddings
 
 
+def call_gpt4(prompt: str) -> str:
+    """
+    Sends a prompt to the GPT-4 model and retrieves a response.
+
+    This function interacts with the OpenAI API, specifically using
+    the gpt-3.5-turbo model to generate a conversational response.
+    It simulates a conversation by providing system messages,
+    past interactions, and the latest user query.
+
+    Args:
+    - prompt (str): The message from the user for which the GPT model will generate a response.
+
+    Returns:
+    - str: The content of the message generated by the GPT model in response to the prompt.
+
+    Note: This function assumes that 'client' is an instance of OpenAI's client object
+    that has been properly authenticated and initialized elsewhere in your code.
+    """
+
+    # Interact with the OpenAI API to get a response for the prompt provided
+    response = client.chat.completions.create(
+        model="gpt-3.5-turbo",  # Specifies the AI model to use for the response
+        messages=[  # Constructs the context and the prompt for the AI
+            {"role": "system", "content": "You are a helpful assistant."},
+            {"role": "user", "content": "Who won the world series in 2020?"},
+            {
+                "role": "assistant",
+                "content": "The Los Angeles Dodgers won the World Series in 2020."
+            },
+            {"role": "user", "content": "Where was it played?"}
+        ]
+    )
+
+    # Return the AI's response to the user's most recent prompt
+    return response.choices[0].message.content
+
+
 def quantize_to_kbit(arr: Union[np.ndarray, Any], k: int = 16) -> np.ndarray:
     """Converts an array to a k-bit representation by normalizing and scaling its values.
 
@@ -256,4 +288,4 @@ def query_search(
     # Sort the DataFrame based on the 'qim' score in descending order
     refs = refs.sort_values(by="qim", ascending=False)
 
-    return refs
+    return refs
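
For context, a minimal usage sketch of the new helper. It assumes the OpenAI Python SDK (v1+) and an OPENAI_API_KEY environment variable; the module-level `client` that the committed docstring says must be "initialized elsewhere" is stood in for here. Note that the committed body sends the fixed example conversation shown in the diff rather than forwarding `prompt`, so the prompt-forwarding variant below (`call_gpt4_with_prompt`) is a hypothetical illustration, not the repository's code.

    # Minimal sketch, assuming openai>=1.0 and OPENAI_API_KEY in the environment
    from openai import OpenAI

    client = OpenAI()  # reads OPENAI_API_KEY from the environment by default

    def call_gpt4_with_prompt(prompt: str) -> str:
        # Hypothetical variant that forwards the caller's prompt to the model
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
        )
        # Return the assistant's reply text
        return response.choices[0].message.content

    print(call_gpt4_with_prompt("What year is it?"))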