ReyDev committed on
Commit 4d6ca8c · unverified · 1 Parent(s): 74b0129

🔧 refactor(ai.py): add RunTree parameter to AnthropicCustom class for better traceability

✨ feat(ai.py): add get_final_response method to post run details to LANGCHAIN_ENDPOINT
🔧 refactor(ai.py): modify get_anthropic_response_async to append each line to final_response
🔧 refactor(ai.py): add __str__, __repr__, and __iter__ methods to return final_response
🔧 refactor(const.py): update CSS for better layout and responsiveness
🔥 remove(const.py): remove Instant1_3 from ClaudeModels enum as it's no longer used
✨ feat(settings.py): add LANGCHAIN_API_KEY, LANGCHAIN_ENDPOINT, and LANGCHAIN_PROJECT to Settings class for new feature integration
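
A rough usage sketch of the refactored class described above: the RunTree construction, prompt, and model values below are illustrative assumptions, not part of this commit.

import asyncio

from langsmith.run_trees import RunTree

from claude_space.ai import AnthropicCustom
from claude_space.settings import settings


async def demo():
    # Hypothetical parent run; the refactor only requires an object whose .id
    # can be written into parent_run_id by get_final_response().
    parent_run = RunTree(name="chat-turn", run_type="chain", inputs={"text": "Hi"})

    claude = AnthropicCustom(
        api_key=settings.ANTHROPIC_API_KEY,
        model="claude-2",
        run_tree=parent_run,
        max_tokens=256,
        prompt="\n\nHuman: Hi\n\nAssistant:",
    )

    # Consuming the stream accumulates final_response; once the generator
    # finishes, get_final_response() posts the run to LANGCHAIN_ENDPOINT
    # (see the ai.py diff below).
    async for chunk in claude.get_anthropic_response_async():
        print(chunk, end="", flush=True)


asyncio.run(demo())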

claude_space/ai.py CHANGED
@@ -1,18 +1,32 @@
-from anthropic import HUMAN_PROMPT, Anthropic, AsyncAnthropic
+import datetime
+
+import requests
+from anthropic import Anthropic, AsyncAnthropic
 from dotenv import load_dotenv
 from langsmith.run_helpers import traceable
+from langsmith.run_trees import RunTree
+
+from claude_space.settings import settings
 
 load_dotenv()
 
 
+@traceable(run_type="llm")
 class AnthropicCustom:
-    def __init__(self, api_key, model, max_tokens=1000, prompt=""):
+    def __init__(
+        self,
+        api_key,
+        model,
+        run_tree: RunTree,
+        max_tokens=1000,
+        prompt="",
+    ):
         self.api_key = api_key
         self.model = model
         self.max_tokens = max_tokens
         self.prompt = prompt
+        self.run_tree = run_tree
 
-    @traceable(run_type="llm", name="Claude", tags=["ai", "anthropic"])
     def get_anthropic_response(self):
         syncClient = Anthropic(api_key=self.api_key, timeout=5)
         response = syncClient.completions.create(
@@ -20,18 +34,48 @@ class AnthropicCustom:
             model=self.model,
             max_tokens_to_sample=self.max_tokens,
         )
+        self.final_response = response.completion
         return response.completion
 
-    @traceable(run_type="llm", name="Claude", tags=["ai", "anthropic"])
     async def get_anthropic_response_async(self):
         asyncClient = AsyncAnthropic(api_key=self.api_key, timeout=60)
-        async for line in await asyncClient.completions.create(
-            prompt=self.prompt,
-            model=self.model,
-            max_tokens_to_sample=self.max_tokens,
-            stop_sequences=[
-                HUMAN_PROMPT,
-            ],
-            stream=True,
-        ):
-            yield line.completion
+        self.final_response = ""
+        try:
+            async for line in await asyncClient.completions.create(
+                prompt=self.prompt,
+                model=self.model,
+                max_tokens_to_sample=self.max_tokens,
+                stream=True,
+            ):
+                self.final_response += line.completion
+                yield line.completion
+        finally:
+            self.get_final_response()
+
+    def get_final_response(self):
+        requests.post(
+            url=f"{settings.LANGCHAIN_ENDPOINT}/runs",
+            json={
+                "name": "Anthropic",
+                "outputs": {"text": self.final_response},
+                "run_type": "chain",
+                "inputs": {"text": self.prompt},
+                "start_time": datetime.datetime.utcnow().isoformat(),
+                "end_time": datetime.datetime.utcnow().isoformat(),
+                "tags": [self.model, self.max_tokens],
+                "parent_run_id": str(self.run_tree.id),
+                "session_name": settings.LANGCHAIN_PROJECT,
+            },
+            headers={
+                "x-api-key": settings.LANGCHAIN_API_KEY,
+            },
+        )
+
+    def __str__(self):
+        return self.final_response
+
+    def __repr__(self):
+        return self.final_response
+
+    def __iter__(self):
+        return iter(self.final_response)
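
One behavioral note on the dunder methods added at the end of ai.py: final_response is a plain string, so __iter__ hands back a character iterator rather than the stream chunks. A tiny illustration, reusing the hypothetical claude instance from the sketch above after a completed call:

# Assumes claude.final_response has been populated by a completed call.
print(str(claude))      # full completion text via __str__
print(repr(claude))     # __repr__ returns the same final_response string
chars = list(claude)    # __iter__ -> iter(final_response): individual characters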
claude_space/const.py CHANGED
@@ -1,10 +1,42 @@
 import enum
 
 CSS = """
-.contain { display: flex; flex-direction: column; }
-.gradio-container { height: 100vh !important; }
-#component-0 { height: 100%; }
-#chatbot { flex-grow: 1; overflow: auto;}
+body, html {
+    height: 100%;
+    margin: 0;
+    display: flex;
+    flex-direction: column;
+}
+.gradio.app {
+    flex: 1;
+    display: flex;
+    flex-direction: column;
+}
+.gradio.app .interface {
+    flex: 1;
+    display: flex;
+    flex-direction: column;
+}
+.gradio.app .interface .panel {
+    flex: 1;
+    display: flex;
+    flex-direction: column;
+}
+.contain {
+    display: flex;
+    flex-direction: column;
+}
+.gradio-container {
+    width: 100% !important;
+    height: 100% !important;
+}
+#component-0 {
+    height: 100%;
+}
+#chatbot {
+    flex-grow: 1;
+    overflow: auto;
+}
 """
 
 
@@ -15,7 +47,6 @@ class ClaudeDefaultFormatter(enum.Enum):
 class ClaudeModels(str, enum.Enum):
     Inatant1_1: str = "claude-instant-1"
     Instant1_2: str = "claude-instant-1.2"
-    Instant1_3: str = "claude-instant-1.3"
     Claude2: str = "claude-2"
 
 
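
The CSS constant above only takes effect once it is handed to the Gradio app. A minimal sketch of that wiring, assuming a Blocks-based UI (the layout below is illustrative, not part of this commit):

import gradio as gr

from claude_space.const import CSS

# Hypothetical wiring: Blocks(css=...) injects the rules above so the
# component with elem_id "chatbot" can flex to fill the viewport height.
with gr.Blocks(css=CSS) as demo:
    chatbot = gr.Chatbot(elem_id="chatbot")

demo.launch()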
 
claude_space/settings.py CHANGED
@@ -8,6 +8,9 @@ load_dotenv()
 class Settings:
 
     ANTHROPIC_API_KEY: str = os.environ.get("ANTHROPIC_API_KEY")
+    LANGCHAIN_API_KEY: str = os.environ.get("LANGCHAIN_API_KEY")
+    LANGCHAIN_ENDPOINT: str = os.environ.get("LANGCHAIN_ENDPOINT")
+    LANGCHAIN_PROJECT: str = os.environ.get("LANGCHAIN_PROJECT")
 
 
 settings = Settings()
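
For reference, the three new settings would typically be supplied through the .env file that load_dotenv() reads. Placeholder values only; the endpoint shown is the usual LangSmith URL, and the project name is an assumption:

# .env (placeholder values)
ANTHROPIC_API_KEY=<your-anthropic-key>
LANGCHAIN_API_KEY=<your-langsmith-key>
LANGCHAIN_ENDPOINT=https://api.smith.langchain.com
LANGCHAIN_PROJECT=claude-space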