PERPLEXITY_CLIENT
- app.py +6 -13
- chatbot.py +29 -0
app.py CHANGED
@@ -73,6 +73,7 @@ from google.oauth2.service_account import Credentials
 import vertexai
 from vertexai.generative_models import GenerativeModel, Part
 
+
 # import boto3
 
 from chatbot import Chatbot
@@ -96,6 +97,7 @@ if is_env_local:
     OPEN_AI_ASSISTANT_ID_GPT4_BOT1 = config["OPEN_AI_ASSISTANT_ID_GPT4_BOT1"]
     OPEN_AI_ASSISTANT_ID_GPT3_BOT1 = config["OPEN_AI_ASSISTANT_ID_GPT3_BOT1"]
     GROQ_API_KEY = config["GROQ_API_KEY"]
+    PERPLEXITY_API_KEY = config["PERPLEXITY_API_KEY"]
     JUTOR_CHAT_KEY = config["JUTOR_CHAT_KEY"]
     AWS_ACCESS_KEY = config["AWS_ACCESS_KEY"]
     AWS_SECRET_KEY = config["AWS_SECRET_KEY"]
@@ -112,6 +114,7 @@ else:
     OPEN_AI_ASSISTANT_ID_GPT4_BOT1 = os.getenv("OPEN_AI_ASSISTANT_ID_GPT4_BOT1")
     OPEN_AI_ASSISTANT_ID_GPT3_BOT1 = os.getenv("OPEN_AI_ASSISTANT_ID_GPT3_BOT1")
     GROQ_API_KEY = os.getenv("GROQ_API_KEY")
+    PERPLEXITY_API_KEY = os.getenv("PERPLEXITY_API_KEY")
     JUTOR_CHAT_KEY = os.getenv("JUTOR_CHAT_KEY")
     AWS_ACCESS_KEY = os.getenv("AWS_ACCESS_KEY")
     AWS_SECRET_KEY = os.getenv("AWS_SECRET_KEY")
@@ -139,12 +142,7 @@ GBQ_CLIENT = bigquery.Client.from_service_account_info(json.loads(GBQ_KEY))
 GROQ_CLIENT = Groq(api_key=GROQ_API_KEY)
 GCS_SERVICE = GoogleCloudStorage(GCS_KEY)
 GCS_CLIENT = GCS_SERVICE.client
-
-# service_name="bedrock-runtime",
-# aws_access_key_id=AWS_ACCESS_KEY,
-# aws_secret_access_key=AWS_SECRET_KEY,
-# region_name=AWS_REGION_NAME,
-# )
+PERPLEXITY_CLIENT = OpenAI(api_key=PERPLEXITY_API_KEY, base_url="https://api.perplexity.ai")
 
 # check open ai access
 def check_open_ai_access(open_ai_api_key):
@@ -2585,11 +2583,6 @@ def get_chatbot_config(ai_name, transcript_state, key_moments, content_subject,
             "ai_client": GROQ_CLIENT,
             "ai_model_name": "groq_llama3",
         },
-        # "lili": {
-        # "ai_name": "lili",
-        # "ai_client": BEDROCK_CLIENT,
-        # "ai_model_name": "claude3",
-        # },
         "lili": {
             "ai_name": "lili",
             "ai_client": GROQ_CLIENT,
@@ -2597,8 +2590,8 @@ def get_chatbot_config(ai_name, transcript_state, key_moments, content_subject,
         },
         "maimai": {
            "ai_name": "maimai",
-            "ai_client":
-            "ai_model_name": "
+            "ai_client": PERPLEXITY_CLIENT,
+            "ai_model_name": "perplexity_sonar",
         }
    }
    ai_client = ai_name_clients_model.get(ai_name, "foxcat")["ai_client"]
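Taken together, the app.py changes load PERPLEXITY_API_KEY from config.json locally or from the environment otherwise, build PERPLEXITY_CLIENT with the existing OpenAI SDK pointed at Perplexity's OpenAI-compatible endpoint, and map the "maimai" persona to that client under the "perplexity_sonar" model name. The sketch below is a minimal smoke test of that wiring in isolation, not part of the commit: it assumes PERPLEXITY_API_KEY is exported in the environment, reuses the base_url and the "sonar-pro" model name that appear in this diff and in chatbot.py, and uses an illustrative prompt.

# Minimal smoke test for the Perplexity wiring (illustrative, not committed code).
import os

from openai import OpenAI

PERPLEXITY_API_KEY = os.getenv("PERPLEXITY_API_KEY")  # assumed to be set
PERPLEXITY_CLIENT = OpenAI(api_key=PERPLEXITY_API_KEY, base_url="https://api.perplexity.ai")

response = PERPLEXITY_CLIENT.chat.completions.create(
    model="sonar-pro",  # model name used by chat_with_perplexity_sonar in chatbot.py
    messages=[{"role": "user", "content": "Reply with one short sentence."}],
    max_tokens=50,
)
print(response.choices[0].message.content)

Because Perplexity exposes an OpenAI-compatible API, the commit reuses the OpenAI client already imported in app.py instead of adding a new SDK dependency.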
chatbot.py CHANGED
@@ -77,6 +77,8 @@ class Chatbot:
             return self.chat_with_groq(service_type, system_prompt, messages)
         elif service_type == 'claude3':
             return self.chat_with_claude3(system_prompt, messages)
+        elif service_type == 'perplexity_sonar':
+            return self.chat_with_perplexity_sonar(system_prompt, messages)
         else:
             raise gr.Error("不支持的服务类型")
 
@@ -152,3 +154,30 @@ class Chatbot:
         response_body = json.loads(response.get('body').read())
         response_completion = response_body.get('content')[0].get('text').strip()
         return response_completion
+
+    def chat_with_perplexity_sonar(self, system_prompt, messages):
+        """使用 Perplexity Sonar API 進行對話"""
+        if not system_prompt.strip():
+            raise ValueError("System prompt cannot be empty")
+
+        messages.insert(0, {"role": "system", "content": system_prompt})
+
+        print("======model======")
+        print("perplexity-sonar")
+
+        try:
+            perplexity_client = self.ai_client
+            response = perplexity_client.chat.completions.create(
+                model="sonar-pro",
+                messages=messages,
+                max_tokens=500,
+                temperature=0.7,
+                top_p=0.9
+            )
+            response_completion = response.choices[0].message.content.strip()
+            return response_completion
+
+        except Exception as e:
+            print(f"Perplexity Sonar API Error: {e}")
+            raise gr.Error("與 Perplexity Sonar API 通訊時發生錯誤")
+
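The new chat_with_perplexity_sonar method prepends the system prompt to the caller's messages list in place, then calls the OpenAI-compatible chat.completions endpoint on self.ai_client, which get_chatbot_config now sets to PERPLEXITY_CLIENT for "maimai". (The docstring reads "use the Perplexity Sonar API for the conversation"; the gr.Error messages translate to "unsupported service type" and "an error occurred while communicating with the Perplexity Sonar API".) For reference, a standalone variant of the same call is sketched below, assuming only that an OpenAI client pointed at https://api.perplexity.ai is passed in; it copies the history instead of mutating it, since with the committed messages.insert(0, ...) a reused history list would accumulate an extra system message on every turn.

# Hedged sketch, not the committed method: the same Perplexity call with a copied history.
from openai import OpenAI


def perplexity_sonar_reply(client: OpenAI, system_prompt: str, messages: list) -> str:
    if not system_prompt.strip():
        raise ValueError("System prompt cannot be empty")
    # Build a new list so the caller's conversation history stays untouched.
    payload = [{"role": "system", "content": system_prompt}, *messages]
    response = client.chat.completions.create(
        model="sonar-pro",  # values mirror the diff above
        messages=payload,
        max_tokens=500,
        temperature=0.7,
        top_p=0.9,
    )
    return response.choices[0].message.content.strip()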