Update actions/actions.py

actions/actions.py  CHANGED  (+52 -30)
@@ -10,8 +10,60 @@ from rasa_sdk.events import SlotSet, FollowupAction
 from rasa_sdk.executor import CollectingDispatcher
 import random
 import os
+import sys
 import openai
 
+# Add "/actions" to the sys.path
+actions_path = os.path.abspath("/actions")
+sys.path.insert(0, actions_path)
+
+# Import search_content.py from the /actions folder
+from search_content import main_search
+
+
+# Import the API key from secrets
+secret_value_0 = os.environ.get("openai")
+
+# Provide your OpenAI API key
+openai.api_key = secret_value_0
+
+def generate_openai_response(query, model_engine="text-davinci-002", max_tokens=124, temperature=0.8):
+    """Generate a response using the OpenAI API."""
+    # Run the main function from search_content.py and store the results in a variable
+    results = main_search(query)
+
+    # Create the context from the results
+    context = "".join([f"#{str(i)}" for i in results])[:2014]  # Trim the context to 2014 characters - modify as necessary
+    prompt_template = f"Relevant context: {context}\n\n Answer the question in detail: {query}"
+
+    # Generate a response using the OpenAI API
+    response = openai.Completion.create(
+        engine=model_engine,
+        prompt=prompt_template,
+        max_tokens=max_tokens,
+        temperature=temperature,
+        n=1,
+        stop=None,
+    )
+
+    return response.choices[0].text.strip()
+
+class GetOpenAIResponse(Action):
+
+    def name(self) -> Text:
+        return "action_get_response_openai"
+
+    def run(self,
+            dispatcher: CollectingDispatcher,
+            tracker: Tracker,
+            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
+
+        # Use the OpenAI API to generate a response to the latest user message
+        generated_text = generate_openai_response(tracker.latest_message.get('text'))
+
+        # Output the generated response to the user
+        dispatcher.utter_message(text=generated_text)
+
 class GeneralHelp(Action):
     def name(self) -> Text:
         return "action_general_help"
@@ -153,33 +205,3 @@ class SayHelloWorld(Action):
         # Output the generated response to user
         generated_text = response.choices[0].text
         dispatcher.utter_message(text=generated_text)
-
-
-class GetOpenAIResponse(Action):
-
-    def name(self) -> Text:
-        return "action_get_response_openai"
-
-    def run(self,
-            dispatcher: CollectingDispatcher,
-            tracker: Tracker,
-            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
-
-        # Use OpenAI API to generate a response
-        secret_value_0 = os.environ.get("openai")
-        openai.api_key = secret_value_0
-        model_engine = "text-davinci-002"
-        prompt_template = tracker.latest_message.get('text')
-
-        response = openai.Completion.create(
-            engine=model_engine,
-            prompt=prompt_template,
-            max_tokens=124,
-            temperature=0.8,
-            n=1,
-            stop=None,
-        )
-
-        # Output the generated response to user
-        generated_text = response.choices[0].text
-        dispatcher.utter_message(text=generated_text)
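
The diff imports main_search from search_content.py but does not include that module. Purely as a hypothetical sketch of the interface the new helper appears to rely on (an iterable of text snippets that get joined into the prompt context), it might look roughly like this; the real module in this Space is not shown here.

# Hypothetical stand-in for actions/search_content.py - illustration only.
# generate_openai_response() treats main_search(query) as an iterable of text
# snippets, so any implementation returning a list of strings would fit.
from typing import List

def main_search(query: str) -> List[str]:
    # Trivial keyword match over an in-memory corpus; the actual module
    # presumably searches the project's own content store.
    corpus = [
        "Custom actions for this assistant live in actions/actions.py.",
        "The OpenAI API key is read from the 'openai' environment variable.",
    ]
    terms = query.lower().split()
    return [doc for doc in corpus if any(term in doc.lower() for term in terms)]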
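For a quick manual check of the new retrieval-augmented helper outside of Rasa, something like the sketch below should work, assuming the legacy openai 0.x SDK (the diff calls openai.Completion.create with engine=), an API key in the "openai" environment variable, and the standard Rasa layout where the file above is actions/actions.py.

# Hypothetical usage sketch - not part of the commit.
from actions.actions import generate_openai_response

if __name__ == "__main__":
    # Defaults mirror the values used in the diff.
    answer = generate_openai_response(
        "How do I use this assistant?",
        model_engine="text-davinci-002",
        max_tokens=124,
        temperature=0.8,
    )
    print(answer)

Inside the assistant itself, action_get_response_openai is the name that needs to be listed under actions in the domain file so Rasa can dispatch to the new custom action.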