# NOTE(review): the lines that were here were HuggingFace page-scrape residue
# (Space status, file size, commit hashes, and a line-number gutter) — not part
# of the module source. Replaced with this comment so the file parses.
from typing import Optional, List
from llama_index.core.vector_stores import (
MetadataFilter,
MetadataFilters,
FilterCondition,
)
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.tools import QueryEngineTool, ToolMetadata
from llama_index.agent.openai import OpenAIAgent
from llama_index.llms.openai import OpenAI
from llama_index.storage.chat_store.redis import RedisChatStore
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.query_engine import CitationQueryEngine
from llama_index.core import Settings
from core.chat.chatstore import ChatStore
from service.dto import ChatMessage
from config import GPTBOT_CONFIG
from core.prompt import SYSTEM_BOT_TEMPLATE
import redis
import os
import json
class Engine:
    """Factory for OpenAI chat agents backed by citation query engines over a vector index.

    On construction it configures the project LLM from ``GPTBOT_CONFIG`` and installs
    it as the llama-index global default (``Settings.llm``).
    """

    def __init__(self):
        # LLM configured from project settings; also installed globally so that
        # llama-index components created without an explicit llm use the same one.
        self.llm = OpenAI(
            temperature=GPTBOT_CONFIG.temperature,
            model=GPTBOT_CONFIG.model,
            max_tokens=GPTBOT_CONFIG.max_tokens,
            api_key=GPTBOT_CONFIG.api_key,
        )
        self.chat_store = ChatStore()
        Settings.llm = self.llm

    def _build_description_bot(self, title, category):
        """Ask the LLM to write a tool description for the given title/category.

        Returns the completion result from ``llm.complete`` on success, or a
        best-effort error string if the LLM call fails (callers stringify it).
        """
        try:
            prompt = f"Write a detailed description for an OpenAI agent with the title '{title}' and categorized under '{category}'."
            description = self.llm.complete(prompt)
            return description
        except Exception as e:
            return f"Error generating description: {str(e)}"

    def get_citation_engine(self, title, category, index):
        """Build a CitationQueryEngine restricted to documents whose metadata
        matches BOTH the given title and category."""
        filters = MetadataFilters(
            filters=[
                MetadataFilter(key="title", value=title),
                MetadataFilter(key="category", value=category),
            ],
            condition=FilterCondition.AND,
        )
        retriever = index.as_retriever(similarity_top_k=5, filters=filters)
        return CitationQueryEngine(retriever=retriever)

    def get_chat_engine(
        self, session_id, index, title=None, category=None, type="general"
    ):
        """Create an OpenAIAgent chat engine over *index*.

        type="general" queries the whole index with a fixed description;
        any other value filters by *title*/*category* and asks the LLM for a
        description. Memory is keyed by *session_id* via the chat store.
        """
        if type == "general":
            citation_engine = CitationQueryEngine.from_args(index, similarity_top_k=5)
            description = "A book containing information about medicine"
        else:
            citation_engine = self.get_citation_engine(title, category, index)
            # BUG FIX: original called self._build_description_bot() with no
            # arguments (the method requires title and category -> TypeError).
            # str(...) normalizes the CompletionResponse (or error string) to
            # the plain-str description the other branch uses.
            description = str(self._build_description_bot(title, category))

        metadata = ToolMetadata(name="bot-belajar", description=description)
        vector_query_engine = QueryEngineTool(
            query_engine=citation_engine, metadata=metadata
        )

        # Initialize the OpenAI agent with the tool and per-session memory.
        chat_engine = OpenAIAgent.from_tools(
            tools=[vector_query_engine],
            llm=self.llm,
            memory=self.chat_store.initialize_memory_bot(session_id),
            system_prompt=SYSTEM_BOT_TEMPLATE,
        )
        return chat_engine