from llama_index.core.agent import ReActAgent
from llama_index.llms.openai import OpenAI
from llama_index.core.tools import FunctionTool
from opensearchpy import OpenSearch
from gradio_client import Client
import streamlit as st
import json
import openai
import os
import warnings

warnings.filterwarnings('ignore')

# Configuration is read from environment variables: the OpenAI API key, the
# fine-tuning job id, and the OpenSearch connection details.
openai_api_key = os.getenv("OPENAI_API_KEY")
job_id = os.getenv('job_id')
user = os.getenv('USERNAME')
password = os.getenv('PASSWORD')
host = os.getenv('HOST')
port = int(os.getenv('PORT'))
auth = (user, password)

client = openai.OpenAI(api_key=openai_api_key)
# OpenSearch client and index discovery are currently disabled; implement_query below
# only validates the generated queries until os_client is re-enabled.
# os_client = OpenSearch(
#     hosts=[{'host': host, 'port': port}],
#     http_auth=auth,
#     use_ssl=True,
#     verify_certs=False
# )
# indices = os_client.cat.indices(format="json")
# list_of_indeces = []
# for index in indices:
#     list_of_indeces.append(index['index'])
def rag_app(user_input: str) -> str:
    """Forward the user's question to the hosted RAG Space and return its answer."""
    gr_client = Client("anasmkh/QdrantVectorStore_Llamaindex")
    result = gr_client.predict(
        user_input=user_input,
        api_name="/chat_with_ai"
    )
    return result
rag_tool = FunctionTool.from_defaults(fn=rag_app)
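# Note: FunctionTool.from_defaults derives the tool's name and description from the
# wrapped function, so this tool is exposed to the agent as "rag_app". Illustrative
# only: the function can be smoke-tested directly before wiring it into the agent,
# e.g. rag_app("What data is available for Zambia?"); the question is a made-up
# example and the call hits the hosted Gradio Space.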
def query_generator(user_input: str) -> str:
    """Translate a natural-language request into an OpenSearch JSON query using the fine-tuned model."""
    job = job_id
    response = client.fine_tuning.jobs.retrieve(job)
    completion = client.chat.completions.create(
        model=response.fine_tuned_model,
        messages=[
            {"role": "system", "content": """You are a highly skilled assistant trained to translate natural language requests into accurate and efficient OpenSearch JSON queries. Follow a clear, step-by-step process to:
Understand the user's request by breaking it down into components such as filters, aggregations, sort criteria, and specific fields.
Pay special attention to fields with unique names, such as Date (instead of timestamp) and Stream (instead of type), and ensure they are used correctly in the query.
Recognize that the user operates within two main opcos: Zambia and Eswatini, each containing ptm_counters, ptm_events, and multiple streams like ers-daily.
Generate a valid JSON query strictly based on the provided indices, ensuring it aligns with the user's prompt.
When generating the query:
Be precise and include only necessary fields and components relevant to the request.
Assume any unspecified context or detail needs clarification and provide a clear explanation of your assumptions if needed.
Optimize the query for OpenSearch performance and readability.
Your goal is to provide a query that directly addresses the user's needs while being efficient and valid within the OpenSearch framework."""
            },
            {"role": "user", "content": user_input}
        ]
    )
    return completion.choices[0].message
query_tool = FunctionTool.from_defaults(fn=query_generator)
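# Note: the tool is exposed to the agent under the function name "query_generator",
# which is the value the UI checks in source.tool_name below. query_generator returns
# the ChatCompletionMessage object itself; the UI reads its .content attribute.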
llm = OpenAI(model="gpt-3.5-turbo", temperature=0)
agent = ReActAgent.from_tools([query_tool, rag_tool], llm=llm, verbose=True)
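# Note: agent.chat(...) returns an AgentChatResponse whose .sources list contains one
# ToolOutput per tool call, exposing tool_name and raw_output; the Streamlit loop
# below relies on that structure.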
def implement_query(generated_query):
    """Validate the generated query as JSON and, once os_client is enabled, execute it against OpenSearch."""
    try:
        st.write("Raw Query:", generated_query)
        if isinstance(generated_query, str):
            generated_query = generated_query.replace("'", '"')
            query = json.loads(generated_query)
        else:
            query = generated_query
        st.write("Validated Query:", query)
        # OpenSearch execution is currently disabled; re-enable together with os_client above.
        # response = os_client.search(body=query)
        # return response
        return query
    except json.JSONDecodeError as e:
        st.error("Error: The generated query is not valid JSON.")
        st.write(f"JSONDecodeError Details: {e}")
    except Exception as e:
        st.error(f"Error executing OpenSearch query: {e}")
        st.write(f"Exception Details: {e}")
# Streamlit UI: route the user's prompt through the ReAct agent and display the
# output of whichever tool it used.
st.subheader('OpenSearch Assistant')
user_input = st.text_input("Enter your query:", "")

if st.button("Submit"):
    if user_input:
        with st.spinner("Processing..."):
            try:
                response = agent.chat(user_input)
                st.success("Query Processed Successfully!")
                st.subheader("Agent Response:")
                sources = response.sources
                for source in sources:
                    st.write('Used Tool: ', source.tool_name)
                    if source.tool_name == 'query_generator':
                        st.write(source.raw_output.content)
                        os_response = implement_query(source.raw_output.content)
                        st.subheader('OS Response')
                        st.write(os_response)
                    else:
                        st.write(source.raw_output[0][0][1])
            except Exception as e:
                st.error(f"Error: {e}")
    else:
        st.warning("Please enter a query to process.")