mgokg committed on
Commit a613311 · verified · 1 Parent(s): b34a324

Update app.py

Files changed (1)
  1. app.py +22 -2
app.py CHANGED
@@ -16,9 +16,29 @@ collection = client_chroma.get_or_create_collection(name=collection_name)
 embedding_function = embedding_functions.DefaultEmbeddingFunction()
 
 client = Client("Qwen/Qwen2.5-72B-Instruct")
-def ask_llm(llm_prompt_input):
+def ask_llm(llm_prompt_input):
+    # Create an embedding for the prompt
+    query_embedding = embedding_function([llm_prompt_input])[0]
+
+    # Run the similarity search
+    results = collection.query(
+        query_embeddings=[query_embedding],
+        n_results=3
+    )
+
+    # Format the results
+    formatted_results = []
+    for i, doc in enumerate(results["documents"][0]):
+        metadata = results["metadatas"][0][i]
+        filename = metadata["filename"]
+        formatted_results.append(f"{doc}\n")
+
+    queri = "\n".join(formatted_results)
+    #return "\n".join(formatted_results)
+
+
     result = client.predict(
-        query=f"{llm_prompt_input}",
+        query=f"{llm_prompt_input} kontext:{queri}",
         history=[],
         system="You are Qwen, created by Alibaba Cloud. You are a helpful assistant.",
         api_name="/model_chat"
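
Taken together, the hunk turns ask_llm into a small retrieval-augmented call: embed the prompt, pull the three nearest documents from the Chroma collection, and pass them as extra context to the hosted Qwen endpoint. Below is a minimal, self-contained sketch of that flow under stated assumptions: the Chroma client setup, collection name, sample documents, the closing of the predict(...) call, and the final return are not shown in the hunk and are filled in here only for illustration.

# Sketch of the retrieval-augmented flow this commit introduces.
# Setup, sample data, and the end of ask_llm are assumptions, not part of the diff.
import chromadb
from chromadb.utils import embedding_functions
from gradio_client import Client

client_chroma = chromadb.Client()                      # assumed: in-memory client
collection_name = "docs"                               # assumed collection name
collection = client_chroma.get_or_create_collection(name=collection_name)
embedding_function = embedding_functions.DefaultEmbeddingFunction()
client = Client("Qwen/Qwen2.5-72B-Instruct")

# Hypothetical sample data; the diff reads a "filename" key from each document's metadata.
docs = [
    "Qwen2.5 is a family of instruction-tuned language models.",
    "ChromaDB is an embedding database used for similarity search.",
    "Gradio clients can call hosted Spaces programmatically.",
]
collection.add(
    documents=docs,
    metadatas=[{"filename": f"note{i}.txt"} for i in range(len(docs))],
    ids=[f"doc-{i}" for i in range(len(docs))],
    embeddings=embedding_function(docs),
)

def ask_llm(llm_prompt_input):
    # Embed the prompt and fetch the three most similar documents
    query_embedding = embedding_function([llm_prompt_input])[0]
    results = collection.query(query_embeddings=[query_embedding], n_results=3)

    # Join the retrieved documents into one context string
    formatted_results = [f"{doc}\n" for doc in results["documents"][0]]
    queri = "\n".join(formatted_results)

    # Forward the prompt plus retrieved context to the hosted Qwen model
    result = client.predict(
        query=f"{llm_prompt_input} kontext:{queri}",
        history=[],
        system="You are Qwen, created by Alibaba Cloud. You are a helpful assistant.",
        api_name="/model_chat",
    )  # the closing of this call and the return below are assumed; the hunk ends earlier
    return result

print(ask_llm("What is Qwen2.5?"))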