from typing import Any

import os

import redis
from llama_index.agent.openai import OpenAIAgent
from llama_index.core import Settings
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.query_engine import CitationQueryEngine
from llama_index.core.storage.chat_store import SimpleChatStore
from llama_index.core.tools import QueryEngineTool, ToolMetadata
from llama_index.core.vector_stores import (
    FilterCondition,
    MetadataFilter,
    MetadataFilters,
)
from llama_index.llms.openai import OpenAI
from llama_index.storage.chat_store.redis import RedisChatStore

from config import GPTBOT_CONFIG
from core.prompt import SYSTEM_BOT_TEMPLATE


class Engine:
    """Builds chat/query engines backed by an OpenAI LLM with Redis-persisted memory."""

    def __init__(self):
        # Configure the shared LLM once; Settings.llm makes it the
        # llama_index-wide default so query engines pick it up implicitly.
        self.llm = OpenAI(
            temperature=GPTBOT_CONFIG.temperature,
            model=GPTBOT_CONFIG.model,
            max_tokens=GPTBOT_CONFIG.max_tokens,
            api_key=GPTBOT_CONFIG.api_key,
        )
        Settings.llm = self.llm

    def initialize_memory_bot(self, user_id="1"):
        """Return a ChatMemoryBuffer keyed by ``user_id``, persisted in Redis.

        Each distinct ``user_id`` gets its own chat history entry
        (``chat_store_key``), expiring after one hour.
        """
        # TODO(review): host/port are hard-coded — move into GPTBOT_CONFIG.
        redis_client = redis.Redis(
            host="redis-10365.c244.us-east-1-2.ec2.redns.redis-cloud.com",
            port=10365,
            password=os.environ.get("REDIS_PASSWORD"),
        )
        # chat_store = SimpleChatStore()  # in-memory alternative for local dev
        chat_store = RedisChatStore(redis_client=redis_client, ttl=3600)

        memory = ChatMemoryBuffer.from_defaults(
            token_limit=3000, chat_store=chat_store, chat_store_key=user_id
        )
        return memory

    def _build_description_bot(self, title, category):
        """Generate a tool description string for the agent via the LLM.

        Returns a plain ``str`` on both success and failure so callers
        (``ToolMetadata``) always receive a string.
        """
        try:
            prompt = f"Write a detailed description for an OpenAI agent with the title '{title}' and categorized under '{category}'."
            # llm.complete returns a CompletionResponse; coerce to str so the
            # return type matches the error path below.
            return str(self.llm.complete(prompt))
        except Exception as e:
            return f"Error generating description: {str(e)}"

    def _metadata_filters(self, title, category):
        """Build AND-combined metadata filters restricting results to one document."""
        return MetadataFilters(
            filters=[
                MetadataFilter(key="title", value=title),
                MetadataFilter(key="category", value=category),
            ],
            condition=FilterCondition.AND,
        )

    def index_to_query_engine(self, title, category, index):
        """Return a query engine over ``index`` scoped to (title, category)."""
        kwargs = {
            "similarity_top_k": 5,
            "filters": self._metadata_filters(title, category),
        }
        return index.as_query_engine(**kwargs)

    def get_citation_engine(self, title, category, index):
        """Return a CitationQueryEngine over ``index`` scoped to (title, category)."""
        kwargs = {
            "similarity_top_k": 5,
            "filters": self._metadata_filters(title, category),
        }
        retriever = index.as_retriever(**kwargs)
        return CitationQueryEngine(retriever=retriever)

    def get_chat_engine(self, index, title=None, category=None, type="general", user_id="1"):
        """Build an OpenAIAgent chat engine over ``index``.

        ``type`` (kept despite shadowing the builtin, for caller
        compatibility): "general" queries the whole index; any other value
        restricts retrieval to the (title, category) document and generates
        a description for it via the LLM. ``user_id`` selects the per-user
        chat memory (new, defaults to the previous behavior).
        """
        if type == "general":
            # query_engine = index.as_query_engine(similarity_top_k=3)
            citation_engine = CitationQueryEngine.from_args(index, similarity_top_k=5)
            description = "A book containing information about medicine"
        else:
            citation_engine = self.get_citation_engine(title, category, index)
            # BUGFIX: was called with no arguments, raising TypeError.
            description = self._build_description_bot(title, category)

        metadata = ToolMetadata(name="bot-belajar", description=description)
        print(metadata)
        vector_query_engine = QueryEngineTool(
            query_engine=citation_engine, metadata=metadata
        )
        print(vector_query_engine)

        # Agent composes the citation tool with per-user Redis-backed memory.
        chat_engine = OpenAIAgent.from_tools(
            tools=[vector_query_engine],
            llm=self.llm,
            memory=self.initialize_memory_bot(user_id),
            system_prompt=SYSTEM_BOT_TEMPLATE,
        )
        return chat_engine

    def get_chat_history(self):
        # NOTE(review): unimplemented stub — intended to expose stored chat
        # history from the Redis chat store; confirm requirements before filling in.
        pass