Update rag_llamaindex.py
rag_llamaindex.py  CHANGED  +6 -8
@@ -62,15 +62,13 @@ class LlamaIndexRAG(BaseRAG):
         return CallbackManager([token_counter])
 
     def get_callback(self, token_counter):
-        return ("
-                str(token_counter.
-                "
+        return ("Tokens Used: " +
+                str(token_counter.total_llm_token_count) + "\n" +
+                "Prompt Tokens: " +
                 str(token_counter.prompt_llm_token_count) + "\n" +
-                "
-                str(token_counter.completion_llm_token_count)
-
-                str(token_counter.total_llm_token_count))
-
+                "Completion Tokens: " +
+                str(token_counter.completion_llm_token_count))
+
     def get_llm(self, config):
         return OpenAI(
             model = config["model_name"],
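
For reference, the counters read by the fixed get_callback come from the TokenCountingHandler that this file already wraps in a CallbackManager (the hunk's first context line). Below is a minimal standalone sketch of the same report, assuming the llama_index.core import path and a GPT-3.5 tokenizer; neither detail comes from this commit.

# Minimal sketch; import path, tokenizer, and model name are assumptions, not part of this commit.
import tiktoken
from llama_index.core.callbacks import CallbackManager, TokenCountingHandler

# The handler exposes the same counters the patched get_callback reads.
token_counter = TokenCountingHandler(
    tokenizer=tiktoken.encoding_for_model("gpt-3.5-turbo").encode
)
callback_manager = CallbackManager([token_counter])

# ... attach callback_manager to the index / query engine and run a query ...

# Same string the fixed get_callback now returns:
print("Tokens Used: " + str(token_counter.total_llm_token_count) + "\n" +
      "Prompt Tokens: " + str(token_counter.prompt_llm_token_count) + "\n" +
      "Completion Tokens: " + str(token_counter.completion_llm_token_count))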