pvanand committed
Commit 6a1e40c
1 Parent(s): ef3cd74

revert context feature

Files changed (1)
  1. actions/actions.py +19 -58
actions/actions.py CHANGED
@@ -14,7 +14,7 @@ import sys
 import openai
 
 # Add "/app/actions" to the sys.path
-actions_path = os.path.abspath("/workspaces/Omdena-faq-chatbot-RASA/RASA_OpenAI_actions_server/actions")
+actions_path = os.path.abspath("/app/actions")
 sys.path.insert(0, actions_path)
 
 print("-#-System-path-#-")
@@ -31,54 +31,27 @@ secret_value_0 = os.environ.get("openai")
 openai.api_key = secret_value_0
 # Provide your OpenAI API key
 
-#model_engine="text-davinci-002"
-def generate_openai_response(user_queries, model_engine="gpt-3.5-turbo", max_tokens=100, temperature=0.5):
+def generate_openai_response(query, model_engine="text-davinci-003", max_tokens=124, temperature=0.8):
     """Generate a response using the OpenAI API."""
 
-
-
-
-    messages_0=[
-        {"system": f"Output the final question the user is asking, using the previous user questions as context only if necessory"}
-    ]
-    max_user_queries_to_include = min(3,len(user_queries))
-    # The latest query is at the end of the list
-    for i in range(len(user_queries)):
-        if i<max_user_queries_to_include:
-            messages_0.append({f"user_question_{i}": user_queries[-max_user_queries_to_include+i]})
-
-    #Generate a response using the OpenAI API
-    response_0 = openai.Completion.create(
-        model="text-davinci-003",
-        prompt=str(messages_0),
-        max_tokens=20
-    )
-
-
-
     # Run the main function from search_content.py and store the results in a variable
-    extracted_query = response_0.choices[0].text.strip()#--3.5---#response_0.choices[0]['message']['content'].strip()
-    results = main_search(extracted_query)
-
+    results = main_search(query)
+
     # Create context from the results
     context = "".join([f"#{str(i)}" for i in results])[:2014] # Trim the context to 2014 characters - Modify as necessory
-
-    messages_1=[
-        {"role": "system", "content": f"You are tasked to answer user queries using only the following Relevant context: {context}"},
-        {"role": "user", "content": extracted_query}]
-
-    response_1 = openai.ChatCompletion.create(
-        model="gpt-3.5-turbo",
-        messages= messages_1,
-        max_tokens=254,
-        temperature=temperature,
-        top_p=1,
-        frequency_penalty=0,
-        presence_penalty=0
+    prompt_template = f"Relevant context: {context}\n\n Answer the question in detail: {query}"
+
+    # Generate a response using the OpenAI API
+    response = openai.Completion.create(
+        engine=model_engine,
+        prompt=prompt_template,
+        max_tokens=max_tokens,
+        temperature=temperature,
+        n=1,
+        stop=None,
     )
-
 
-    return response_1.choices[0]['message']['content'].strip()#+"Debug INFO: -------------------- Openai-input_1: "+str(messages_1)+" Openai_input_0"+str( messages_0)+" user_queries: "+str(user_queries)
+    return response.choices[0].text.strip()
 
 class GetOpenAIResponse(Action):
@@ -90,24 +63,12 @@ class GetOpenAIResponse(Action):
             tracker: Tracker,
             domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
 
-        # Extract conversation data
-        conversation_history = tracker.events
-
-        user_queries = []
-        bot_responses = []
-
-        for event in conversation_history:
-            if event.get("event") == "user":
-                user_queries.append(event.get("text"))
-            elif event.get("event") == "bot":
-                bot_responses.append(event.get("text"))
-
         # Use OpenAI API to generate a response
-        #query = tracker.latest_message.get('text')
-        response = generate_openai_response(user_queries)
+        query = tracker.latest_message.get('text')
+        response = generate_openai_response(query)
 
         # Output the generated response to user
-        dispatcher.utter_message(text=str(response))
+        dispatcher.utter_message(text=response)
 
 class GeneralHelp(Action):
     def name(self) -> Text:
@@ -249,4 +210,4 @@ class SayHelloWorld(Action):
 
         # Output the generated response to user
         generated_text = response.choices[0].text
-        dispatcher.utter_message(text=generated_text)
+        dispatcher.utter_message(text=generated_text)
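
For reference, the post-revert flow in generate_openai_response is: take the latest user message, run it through main_search from search_content.py, join the hits into one context string (trimmed to 2014 characters), and send a single text-davinci-003 Completion request. The snippet below is a minimal, hypothetical smoke-test sketch of that flow and is not part of this commit: main_search is stubbed out, the example query string is invented, and it assumes the pre-1.0 openai Python package that the file imports, with the API key read from the same "openai" environment variable used in actions.py.

# Hypothetical stand-alone sketch of the reverted single-query flow (not part of the commit).
import os
import openai

openai.api_key = os.environ.get("openai")  # same env var the action server reads

def main_search(query):
    # Stub standing in for search_content.main_search, which in the real
    # project returns a list of relevant document snippets for the query.
    return ["Rasa custom actions run in a separate action server."]

def generate_openai_response(query, model_engine="text-davinci-003", max_tokens=124, temperature=0.8):
    """Mirror of the reverted actions.py helper: search, build prompt, complete."""
    results = main_search(query)
    # Join the search hits into a single context string, trimmed to 2014 characters
    context = "".join([f"#{str(i)}" for i in results])[:2014]
    prompt_template = f"Relevant context: {context}\n\n Answer the question in detail: {query}"
    response = openai.Completion.create(
        engine=model_engine,
        prompt=prompt_template,
        max_tokens=max_tokens,
        temperature=temperature,
        n=1,
        stop=None,
    )
    return response.choices[0].text.strip()

if __name__ == "__main__":
    # Example query is illustrative only.
    print(generate_openai_response("How do I start the Rasa action server?"))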