Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -8,83 +8,38 @@ import requests
|
|
8 |
|
9 |
openai_api_key = os.getenv('OPENAI_API_KEY')
|
10 |
|
11 |
-
|
12 |
-
Settings.embed_model = OpenAIEmbedding(model="text-embedding-ada-002")
|
13 |
|
14 |
-
|
15 |
|
|
|
|
|
16 |
|
17 |
|
18 |
-
def generate_opensearch_query(user_input):
    """Translate a natural-language request into an OpenSearch JSON query via the LLM.

    Builds a few-shot prompt (three worked examples: match, bool+range, range+sort)
    and sends it to the globally configured LlamaIndex LLM (``Settings.llm``).

    Args:
        user_input: The user's natural-language request, interpolated into the prompt.

    Returns:
        The raw LLM completion response object; the caller is expected to read
        its ``.text`` attribute and ``json.loads`` it.
    """
    # Doubled braces ({{ }}) render as literal braces inside this f-string.
    prompt = f"""
    You are an assistant trained to translate natural language requests into OpenSearch queries. Based on the user's request, generate an OpenSearch JSON query.

    User Input: "Get all documents where the status is active."
    Response:
    {{
      "query": {{
        "match": {{
          "status": "active"
        }}
      }}
    }}

    User Input: "Find records with priority high created in the last 7 days."
    Response:
    {{
      "query": {{
        "bool": {{
          "must": [
            {{ "match": {{ "priority": "high" }} }},
            {{ "range": {{ "created_at": {{ "gte": "now-7d/d", "lte": "now" }} }} }}
          ]
        }}
      }}
    }}

    User Input: "Show documents where age is over 30 and sort by created date."
    Response:
    {{
      "query": {{
        "range": {{
          "age": {{ "gt": 30 }}
        }}
      }},
      "sort": [
        {{ "created_date": {{ "order": "asc" }} }}
      ]
    }}

    User Input: "{user_input}"
    Response:
    """
    # NOTE(review): the line interpolating {user_input} above was lost in the
    # diff extraction (deleted line 59 rendered empty); without it the function
    # would ignore its argument, so it is restored here — confirm against history.
    llm_response = Settings.llm.complete(prompt)
    return llm_response
|
65 |
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# --- Streamlit UI (old version): prompt box -> generated OpenSearch query ---
# NOTE(review): the diff rendering truncated two statements to a bare "st."
# (a title-like call before the text area, and the else-branch message);
# reconstructed below as the most likely calls — confirm against repo history.
st.title("OpenSearch Query Generator")  # TODO confirm: original truncated to "st."

user_input = st.text_area("Enter a Prompt:", height=150)

if st.button("Generate OpenSearch Query"):
    if user_input.strip():
        generated_query = generate_opensearch_query(user_input)

        st.subheader("Generated OpenSearch Query:")
        # Raises json.JSONDecodeError if the LLM output is not valid JSON —
        # there is no guard here; kept as in the original.
        st.json(json.loads(generated_query.text))

        # Execution against a live OpenSearch cluster was disabled in the original:
        # try:
        #     response = implement_query(generated_query)
        #     st.subheader("OpenSearch Response:")
        #     st.json(response)
        # except Exception as e:
        #     st.error(f"Error executing OpenSearch query: {e}")
    else:
        st.warning("Please enter a prompt.")  # TODO confirm: original truncated to "st."
|
|
|
8 |
|
9 |
# --- Configuration (new version): OpenAI client backed by a fine-tuned model ---
# May be None if the env var is unset; the OpenAI client would then fail at call time.
openai_api_key = os.getenv('OPENAI_API_KEY')

client = OpenAI(api_key=openai_api_key)

# NOTE(review): `response` is never used below, and `job_id` may be None if the
# env var is unset (retrieve would raise). Looks like a leftover status check —
# consider removing or surfacing the job status in the UI.
job_id = os.getenv('job_id')
response = client.fine_tuning.jobs.retrieve(job_id)

# --- Streamlit UI: single-question chat against the fine-tuned model ---
st.title("AI Assistant - Fine-Tuned Model")

user_input = st.text_input("Enter your question:")

if st.button("Ask"):
    if user_input:
        try:
            completion = client.chat.completions.create(
                model="ft:gpt-3.5-turbo-0125:personal::AVdWfIMJ",
                messages=[
                    {"role": "system", "content": "You are an assistant trained to translate natural language requests into OpenSearch queries. Use a step-by-step approach to break down the user's request, identify components like filters, aggregations, and sort criteria, and generate a valid OpenSearch JSON query."},
                    {"role": "user", "content": user_input}
                ]
            )

            st.write("Assistant's response:")
            st.write(completion.choices[0].message.content)
        except Exception as e:
            # Broad catch is acceptable at this UI boundary: any API/network
            # failure is shown to the user instead of crashing the app.
            st.write("Error:", e)
    else:
        st.write("Please enter a question.")
|