import json
import os

import boto3
from firebase_admin import db  # used by the commented-out RTDB streaming below

class AWSClaude:
    def __init__(
        self, llm, env, user_id, thread_id, stream_id, app_type, other_request_params
    ):
        self.llm = llm
        self.env = env
        self.other_request_params = other_request_params
        # RTDB init and params
        self.user_id = user_id
        self.thread_id = thread_id
        self.stream_id = stream_id
        self.app_type = app_type
        # AWS Bedrock auth: read credentials from the environment instead of
        # hardcoding secrets in source
        self.session = boto3.Session(
            aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
            aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
        )
        # Opus is routed to us-west-2; the other models to us-east-1
        if llm == "ClaudeOpus":
            self.bedrock_runtime = self.session.client(
                service_name="bedrock-runtime", region_name="us-west-2"
            )
        else:
            self.bedrock_runtime = self.session.client(
                service_name="bedrock-runtime", region_name="us-east-1"
            )

    def stream(self, response):
        """Accumulate the streamed completion text from a Bedrock response."""
        # self.entry_ref.update({"exec_status": True})
        full_answer = ""
        for event in response.get("body"):
            try:
                chunk = json.loads(event["chunk"]["bytes"])
                if chunk["type"] == "content_block_delta" and "delta" in chunk:
                    content_text = chunk["delta"]["text"]
                    full_answer += content_text
                    # print(content_text, end="")
                    # stream_ref = self.entry_ref.child("gpt_stream").child(
                    #     self.stream_id
                    # )  # get into the child() node and use ".set"
                    # stream_ref.set(full_answer)
            except Exception as e:
                print(f"Error occurred in the stream loop: {type(e).__name__} -- {e}")
                print("event ---", event)
                raise
        # Set 'exec_status' back to False after execution is complete
        # self.entry_ref.update(
        #     {
        #         "exec_status": False,
        #     }
        # )
        return full_answer
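
    # Illustrative only: each streamed event decodes to an Anthropic
    # Messages-API chunk. The text-delta case handled above looks roughly
    # like this; other event types (message_start, content_block_start,
    # message_stop, ...) fail the type check and are skipped:
    #
    #   {
    #       "type": "content_block_delta",
    #       "index": 0,
    #       "delta": {"type": "text_delta", "text": "Hello"},
    #   }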

    def llm_select(self):
        """Map the friendly model name to its Bedrock model ID."""
        if self.llm == "ClaudeOpus":
            return "anthropic.claude-3-opus-20240229-v1:0"
        elif self.llm == "ClaudeSonnet":
            return "anthropic.claude-3-sonnet-20240229-v1:0"
        elif self.llm == "ClaudeHaiku":
            return "anthropic.claude-3-haiku-20240307-v1:0"
        # Fail loudly instead of silently returning None for an unknown name
        raise ValueError(f"Unsupported llm: {self.llm}")

    def call_claude(self):
        system_prompt = (
            "You are an expert Equity analyst; please don't explicitly mention "
            "that you are Claude, from Anthropic, or an Equity analyst in your "
            "response."
        )
        # Flatten the incoming message list into a single user prompt
        messages = self.other_request_params.get("messages", [])
        prompt = " ".join(str(message) for message in messages)
        max_tokens = self.other_request_params.get("max_tokens", 3500)
        temperature = self.other_request_params.get("temperature", 0)
        top_p = self.other_request_params.get("top_p", 1)
        body = json.dumps(
            {
                "system": system_prompt,
                "messages": [
                    {"role": "user", "content": [{"type": "text", "text": prompt}]}
                ],
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": max_tokens,
                "temperature": temperature,
                "top_p": top_p,
            }
        )
        llm_id = self.llm_select()
        print("llm id --- ", llm_id)
        response = self.bedrock_runtime.invoke_model_with_response_stream(
            body=body,
            modelId=llm_id,
            accept="application/json",
            contentType="application/json",
        )
        return response

    def invoke(self):
        response = self.call_claude()
        output = self.stream(response)
        return output
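

# Minimal usage sketch (assumptions: AWS credentials are exported in the
# environment, and every id below is a placeholder, not a real value):
if __name__ == "__main__":
    claude = AWSClaude(
        llm="ClaudeHaiku",
        env="dev",
        user_id="user-123",
        thread_id="thread-456",
        stream_id="stream-789",
        app_type="equity",
        other_request_params={
            "messages": [
                {"role": "user", "content": "Summarize AAPL's latest quarter."}
            ],
            "max_tokens": 1024,
            "temperature": 0,
        },
    )
    print(claude.invoke())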