bstraehle committed on
Commit 4d824dd · 1 Parent(s): b41edd4

Update agent_llamaindex.py

Files changed (1)
  1. agent_llamaindex.py +8 -43
agent_llamaindex.py CHANGED
@@ -1,10 +1,8 @@
-import os, tiktoken
+import os
 
 from datetime import date
 from llama_hub.tools.weather import OpenWeatherMapToolSpec
-from llama_index import ServiceContext
 from llama_index.agent import OpenAIAgent
-from llama_index.callbacks import CallbackManager, TokenCountingHandler
 from llama_index.llms import OpenAI
 from llama_index.tools import FunctionTool
 
@@ -14,54 +12,21 @@ def today_tool(text: str) -> str:
     Any date mathematics should occur outside this function."""
     return str(date.today())
 
-def get_callback_manager(config):
-    token_counter = TokenCountingHandler(
-        tokenizer = tiktoken.encoding_for_model(config["model"]).encode
-    )
-
-    token_counter.reset_counts()
-
-    return CallbackManager([token_counter])
-
-def get_callback(token_counter):
-    return ("Tokens Used: " +
-            str(token_counter.total_llm_token_count) + "\n" +
-            "Prompt Tokens: " +
-            str(token_counter.prompt_llm_token_count) + "\n" +
-            "Completion Tokens: " +
-            str(token_counter.completion_llm_token_count))
-
-def get_llm(config):
-    return OpenAI(
-        model = config["model"],
-        temperature = config["temperature"]
-    )
-
-def get_service_context(config):
-    return ServiceContext.from_defaults(
-        callback_manager = get_callback_manager(config),
-        llm = get_llm(config)
-    )
-
 def agent_llamaindex(config, prompt):
-    service_context = get_service_context(config)
-
+    llm = OpenAI(
+        model = config["model"],
+        temperature = config["temperature"])
+
     tool_spec = OpenWeatherMapToolSpec(key = os.environ["OPENWEATHERMAP_API_KEY"])
     tools = tool_spec.to_tool_list()
 
     date_tool = FunctionTool.from_defaults(fn = today_tool)
-
+
     agent = OpenAIAgent.from_tools(
         [tools[0], # built-in tools
          date_tool], # custom tools
-        llm = get_llm(config),
-        service_context = service_context,
+        llm = llm,
         verbose = True
     )
 
-    completion = agent.chat(prompt)
-
-    callback = get_callback(
-        service_context.callback_manager.handlers[0])
-
-    return completion, callback
+    return agent.chat(prompt)
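
For context, a minimal sketch of how the simplified function might be called after this commit. The config values, prompt, and import path below are illustrative assumptions, not taken from the repository:

# Hypothetical usage sketch, not part of the commit.
# Assumes OPENAI_API_KEY and OPENWEATHERMAP_API_KEY are set in the environment,
# and that the file is importable as the module agent_llamaindex.
from agent_llamaindex import agent_llamaindex

config = {"model": "gpt-4", "temperature": 0}  # illustrative values
response = agent_llamaindex(config, "What is the weather in Berlin today?")
print(response)  # agent.chat() returns an AgentChatResponse; printing shows its text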