mgokg committed on
Commit de4f158 · verified · 1 Parent(s): bfed16d

Update app.py

Files changed (1)
  1. app.py +5 -27
app.py CHANGED
@@ -4,11 +4,9 @@ from chromadb.utils import embedding_functions
 from PyPDF2 import PdfReader
 from gradio_client import Client
 
-
-# Start ChromaDB
 # Initialize ChromaDB
-#client_chroma = chromadb.Client()
-client_chroma = chromadb.PersistentClient(path = "./tmp", settings = None,)
+client_chroma = chromadb.Client()
+#client_croma = chromadb.PersistentClient(path="/")
 collection_name = "pdf_collection"
 collection = client_chroma.get_or_create_collection(name=collection_name)
 
@@ -16,34 +14,14 @@ collection = client_chroma.get_or_create_collection(name=collection_name)
 embedding_function = embedding_functions.DefaultEmbeddingFunction()
 
 client = Client("Qwen/Qwen2.5-72B-Instruct")
-def ask_llm(llm_prompt_input):
-    # Create an embedding for the prompt
-    query_embedding = embedding_function([llm_prompt_input])[0]
-
-    # Run the similarity search
-    results = collection.query(
-        query_embeddings=[query_embedding],
-        n_results=3
-    )
-
-    # Format the results
-    formatted_results = []
-    for i, doc in enumerate(results["documents"][0]):
-        metadata = results["metadatas"][0][i]
-        filename = metadata["filename"]
-        formatted_results.append(f"{doc}\n")
-
-    #queri = "\n".join(formatted_results)
-    #return "\n".join(formatted_results)
-    print(join(formatted_results))
-
+def ask_llm(llm_prompt_input):
     result = client.predict(
-        query=llm_prompt_input,
+        query=f"{llm_prompt_input}",
         history=[],
         system="You are Qwen, created by Alibaba Cloud. You are a helpful assistant.",
        api_name="/model_chat"
     )
-
+    print(result)
     return result
 
 def process_pdf(file):
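
For context, the main effect of this commit is switching ChromaDB from on-disk persistence to an in-memory client. The following is a minimal, illustrative sketch of the two client variants the diff toggles between; the collection name and paths come from the diff, while the sample document, id, and query text are made up for demonstration:

import chromadb
from chromadb.utils import embedding_functions

# In-memory client (the variant added by this commit): data lives only for
# the lifetime of the process.
client_chroma = chromadb.Client()

# Persistent client (the variant removed by this commit): data is written to
# the given directory and survives restarts.
# client_chroma = chromadb.PersistentClient(path="./tmp")

collection = client_chroma.get_or_create_collection(name="pdf_collection")
embedding_function = embedding_functions.DefaultEmbeddingFunction()

# Adding and querying documents works the same way with either client.
docs = ["example page text extracted from a PDF"]
collection.add(
    ids=["doc-0"],
    documents=docs,
    embeddings=embedding_function(docs),
)
results = collection.query(
    query_embeddings=embedding_function(["an example question"]),
    n_results=1,
)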
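
After this change, ask_llm no longer queries the collection and simply forwards the prompt to the hosted Qwen Space through gradio_client. Below is a rough, self-contained usage sketch under that reading; the example prompt and the __main__ guard are illustrative, and the exact structure of the value returned by the /model_chat endpoint is not shown in the diff:

from gradio_client import Client

client = Client("Qwen/Qwen2.5-72B-Instruct")

def ask_llm(llm_prompt_input):
    # Forward the prompt to the remote Space; no ChromaDB context is attached.
    result = client.predict(
        query=f"{llm_prompt_input}",
        history=[],
        system="You are Qwen, created by Alibaba Cloud. You are a helpful assistant.",
        api_name="/model_chat",
    )
    print(result)
    return result

if __name__ == "__main__":
    # Hypothetical call for demonstration only.
    answer = ask_llm("Summarize the uploaded PDF in one sentence.")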