File size: 3,954 Bytes
d423839
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
from firebase_admin import db
import json
import boto3


class AWSClaude:
    """Streams chat completions from Anthropic Claude 3 models via AWS Bedrock.

    Typical usage: construct with a model alias plus request metadata, then
    call :meth:`invoke` to run the request and collect the streamed answer.
    """

    # Caller-facing model alias -> Bedrock model identifier.
    _MODEL_IDS = {
        "ClaudeOpus": "anthropic.claude-3-opus-20240229-v1:0",
        "ClaudeSonnet": "anthropic.claude-3-sonnet-20240229-v1:0",
        "ClaudeHaiku": "anthropic.claude-3-haiku-20240307-v1:0",
    }

    def __init__(
        self, llm, env, user_id, thread_id, stream_id, app_type, other_request_params
    ):
        """Set up request metadata and a Bedrock runtime client.

        Parameters
        ----------
        llm : str
            Model alias: ``"ClaudeOpus"``, ``"ClaudeSonnet"`` or ``"ClaudeHaiku"``.
        env : str
            Deployment environment tag (stored, not interpreted here).
        user_id, thread_id, stream_id, app_type :
            RTDB routing identifiers (stored for downstream use).
        other_request_params : dict
            Optional request overrides: ``messages``, ``max_tokens``,
            ``temperature``, ``top_p``.
        """
        self.llm = llm
        self.env = env
        self.other_request_params = other_request_params

        # RTDB routing params
        self.user_id = user_id
        self.thread_id = thread_id
        self.stream_id = stream_id
        self.app_type = app_type

        # SECURITY FIX: AWS credentials were previously hardcoded in source.
        # Rely on boto3's default credential chain instead (environment
        # variables, shared credentials file, or an attached IAM role).
        # The leaked key pair must be rotated/revoked in IAM.
        self.session = boto3.Session()

        # Claude 3 Opus was launched in us-west-2; the other models are
        # invoked in us-east-1 — NOTE(review): confirm region availability.
        region = "us-west-2" if llm == "ClaudeOpus" else "us-east-1"
        self.bedrock_runtime = self.session.client(
            service_name="bedrock-runtime", region_name=region
        )

    def stream(self, response):
        """Consume a Bedrock response stream and return the concatenated text.

        Parameters
        ----------
        response : dict
            Result of ``invoke_model_with_response_stream``; its ``"body"``
            entry is an iterable of event dicts with ``chunk.bytes`` payloads.

        Returns
        -------
        str
            All ``content_block_delta`` text fragments joined in order.

        Raises
        ------
        Exception
            Re-raises any error hit while decoding an event, after logging
            the offending chunk.
        """
        full_answer = ""

        for event in response.get("body"):
            # Initialize so the except-branch can always print it; previously
            # a decode failure on the first event raised NameError here.
            chunk = None
            try:
                chunk = json.loads(event["chunk"]["bytes"])
                # Only delta events carry user-visible text; other event types
                # (message_start, content_block_stop, ...) are skipped.
                if chunk["type"] == "content_block_delta" and "delta" in chunk:
                    full_answer += chunk["delta"]["text"]
            except Exception as e:
                print(f"Error occurred with the stream loop {type(e).__name__}, -- {e}")
                print("chunk ---", chunk)
                raise

        return full_answer

    def llm_select(self):
        """Return the Bedrock model id for ``self.llm``.

        Raises
        ------
        ValueError
            If ``self.llm`` is not a recognized model alias (previously this
            silently returned ``None`` and failed later inside Bedrock).
        """
        try:
            return self._MODEL_IDS[self.llm]
        except KeyError:
            raise ValueError(f"Unknown Claude model alias: {self.llm!r}") from None

    def call_claude(self):
        """Send the request to Bedrock and return the raw streaming response.

        Builds an Anthropic Messages API body from ``other_request_params``
        (all incoming messages are flattened into a single user turn) and
        invokes the selected model with a streaming response.
        """
        # Fixed: the prompt was previously wrapped in stray literal double
        # quotes that were sent to the model verbatim.
        system_prompt = (
            "You are an expert Equity analyst, please don't explicitly mention "
            "that you are Claude, or from Anthropic, or an Equity analyst in "
            "your response"
        )
        messages = self.other_request_params.get("messages", [])
        # Flatten the message history into one user turn, preserving order.
        prompt = " ".join(str(message) for message in messages)
        max_tokens = self.other_request_params.get("max_tokens", 3500)
        temperature = self.other_request_params.get("temperature", 0)
        top_p = self.other_request_params.get("top_p", 1)

        body = json.dumps(
            {
                "system": system_prompt,
                "messages": [
                    {"role": "user", "content": [{"type": "text", "text": prompt}]}
                ],
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": max_tokens,
                "temperature": temperature,
                "top_p": top_p,
            }
        )

        llm_id = self.llm_select()
        print("llm id --- ", llm_id)

        response = self.bedrock_runtime.invoke_model_with_response_stream(
            body=body,
            modelId=llm_id,
            accept="application/json",
            contentType="application/json",
        )

        return response

    def invoke(self):
        """Run the full request cycle and return the complete answer text."""
        response = self.call_claude()
        return self.stream(response)