invincible-jha committed
Commit 81887fd · verified · 1 Parent(s): db92003

Update app.py

Files changed (1): app.py +10 -2
app.py CHANGED
@@ -10,11 +10,15 @@ from sklearn.naive_bayes import MultinomialNB
 import asyncio
 from crewai import Agent as CrewAgent, Task, Crew
 import autogen
+import openai

 # Set up logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)

+# Set OpenAI API key from environment variable
+openai.api_key = os.environ.get('OPENAI_API_KEY')
+
 # Initialize the client with the Mistral-7B-Instruct-v0.2 model
 try:
     client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")
@@ -106,6 +110,10 @@ response_expert_crew = CrewAgent(
     tools=[generate_response]
 )

+llm_config = {
+    "config_list": [{"model": "gpt-3.5-turbo", "api_key": os.environ.get('OPENAI_API_KEY')}]
+}
+
 communication_expert_autogen = autogen.AssistantAgent(
     name="Communication_Expert",
     system_message=SHARED_CONTEXT + """
@@ -116,7 +124,7 @@ As the Communication Expert, your primary role is to interpret user queries with
 4. Ensure that any potential complaints or frustrations are acknowledged respectfully.

 Your output should be a rephrased version of the user's query that maintains its original intent while setting the stage for an empathetic and respectful response.""",
-    llm_config={"config_list": [{"model": "gpt-3.5-turbo"}]}
+    llm_config=llm_config
 )

 response_expert_autogen = autogen.AssistantAgent(
@@ -131,7 +139,7 @@ As the Response Expert, your role is to provide accurate, helpful, and emotional
 6. Always uphold Zerodha's reputation for transparency and user-centric service.

 Your output should be a complete, informative response that addresses the user's query while demonstrating empathy and respect.""",
-    llm_config={"config_list": [{"model": "gpt-3.5-turbo"}]}
+    llm_config=llm_config
 )

 user_proxy = autogen.UserProxyAgent(
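
For reference, the net effect of this change is roughly the pattern sketched below. This is a minimal, hypothetical reconstruction: the Response_Expert name and the abbreviated system messages stand in for values defined elsewhere in app.py and are not shown in this diff.

import os
import openai
import autogen

# Read the OpenAI key from the environment instead of hard-coding it
openai.api_key = os.environ.get('OPENAI_API_KEY')

# One shared llm_config, reused by both autogen assistants
llm_config = {
    "config_list": [
        {"model": "gpt-3.5-turbo", "api_key": os.environ.get('OPENAI_API_KEY')}
    ]
}

communication_expert_autogen = autogen.AssistantAgent(
    name="Communication_Expert",
    system_message="...",  # SHARED_CONTEXT + role instructions, abbreviated here
    llm_config=llm_config,
)

response_expert_autogen = autogen.AssistantAgent(
    name="Response_Expert",  # name assumed; not shown in this diff
    system_message="...",  # SHARED_CONTEXT + role instructions, abbreviated here
    llm_config=llm_config,
)

Defining llm_config once keeps the model choice and API key in a single place rather than repeated inside each agent, and reading the key from the OPENAI_API_KEY environment variable keeps it out of the committed source.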