Spaces:
Sleeping
Sleeping
from typing import Optional, List | |
from llama_index.core.vector_stores import ( | |
MetadataFilter, | |
MetadataFilters, | |
FilterCondition, | |
) | |
from llama_index.core.memory import ChatMemoryBuffer | |
from llama_index.core.tools import QueryEngineTool, ToolMetadata | |
from llama_index.agent.openai import OpenAIAgent | |
from llama_index.llms.openai import OpenAI | |
from llama_index.storage.chat_store.redis import RedisChatStore | |
from llama_index.core.memory import ChatMemoryBuffer | |
from llama_index.core.query_engine import CitationQueryEngine | |
from llama_index.core import Settings | |
from core.chat.chatstore import ChatStore | |
from service.dto import ChatMessage | |
from config import GPTBOT_CONFIG | |
from core.prompt import SYSTEM_BOT_TEMPLATE | |
import redis | |
import os | |
import json | |
class Engine:
    """Factory for OpenAI-agent chat engines backed by a vector index.

    Configures a shared OpenAI LLM from GPTBOT_CONFIG, registers it as the
    global llama-index LLM, and builds per-session chat engines whose
    retrieval is optionally filtered to a single title/category.
    """

    def __init__(self):
        # LLM configured once from application config and shared by all engines.
        self.llm = OpenAI(
            temperature=GPTBOT_CONFIG.temperature,
            model=GPTBOT_CONFIG.model,
            max_tokens=GPTBOT_CONFIG.max_tokens,
            api_key=GPTBOT_CONFIG.api_key,
        )
        self.chat_store = ChatStore()
        # Register globally so downstream llama-index components pick it up.
        Settings.llm = self.llm

    def _build_description_bot(self, title, category):
        """Generate a tool description for an agent via the LLM.

        Args:
            title: Document/agent title to mention in the description.
            category: Category the agent is filed under.

        Returns:
            The generated description as a plain string, or an
            "Error generating description: ..." message if the call fails.
        """
        try:
            prompt = f"Write a detailed description for an OpenAI agent with the title '{title}' and categorized under '{category}'."
            # BUG FIX: llm.complete() returns a CompletionResponse object,
            # not text; coerce to str so ToolMetadata receives plain text
            # (and the return type matches the except branch).
            return str(self.llm.complete(prompt))
        except Exception as e:
            return f"Error generating description: {str(e)}"

    def get_citation_engine(self, title, category, index):
        """Build a CitationQueryEngine restricted to one document.

        Retrieval is limited to nodes whose metadata matches BOTH the given
        title AND category.
        """
        filters = MetadataFilters(
            filters=[
                MetadataFilter(key="title", value=title),
                MetadataFilter(key="category", value=category),
            ],
            condition=FilterCondition.AND,
        )
        retriever = index.as_retriever(similarity_top_k=5, filters=filters)
        return CitationQueryEngine(retriever=retriever)

    def get_chat_engine(
        self, session_id, index, title=None, category=None, type="general"
    ):
        """Create an OpenAI agent chat engine for a session.

        Args:
            session_id: Key used to load/persist the session's chat memory.
            index: Vector index to retrieve from.
            title: Document title filter (non-"general" mode only).
            category: Document category filter (non-"general" mode only).
            type: "general" searches the whole index; any other value
                restricts retrieval to the given title/category.
                NOTE: shadows the builtin, kept for caller compatibility.

        Returns:
            An OpenAIAgent wired with a citation query tool and Redis-backed
            chat memory.
        """
        if type == "general":
            citation_engine = CitationQueryEngine.from_args(index, similarity_top_k=5)
            description = "A book containing information about medicine"
        else:
            citation_engine = self.get_citation_engine(title, category, index)
            # BUG FIX: original called _build_description_bot() with no
            # arguments, which always raised TypeError on this branch.
            description = self._build_description_bot(title, category)

        metadata = ToolMetadata(name="bot-belajar", description=description)
        vector_query_engine = QueryEngineTool(
            query_engine=citation_engine, metadata=metadata
        )
        return OpenAIAgent.from_tools(
            tools=[vector_query_engine],
            llm=self.llm,
            memory=self.chat_store.initialize_memory_bot(session_id),
            system_prompt=SYSTEM_BOT_TEMPLATE,
        )