mgokg committed on
Commit
ba6722f
·
verified ·
1 Parent(s): 1b8668d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -6
app.py CHANGED
@@ -12,10 +12,7 @@ collection = client_chroma.get_or_create_collection(name=collection_name)
12
  # Verwende die integrierten Embeddings von ChromaDB
13
  embedding_function = embedding_functions.DefaultEmbeddingFunction()
14
 
15
- # Initialisiere das LLM
16
- client_llm = InferenceClient(
17
- "mistralai/Mistral-7B-Instruct-v0.3"
18
- )
19
  #client_llm = Client("wasmdashai/meta-llama-Llama-3.2-3B-Instruct")
20
 
21
  def process_pdf(file):
@@ -57,12 +54,16 @@ def search_similar_documents(prompt):
57
  return "\n".join(formatted_results)
58
 
59
  def ask_llm(prompt):
 
60
  result = client_llm.predict(
61
  param_0=prompt,
62
- api_name="/predict"
63
- )
 
 
64
  return result
65
 
 
66
  # Erstelle die Gradio-Schnittstelle
67
  with gr.Blocks() as demo:
68
  gr.Markdown("# PDF Upload and Similarity Search with ChromaDB and LLM")
 
12
  # Verwende die integrierten Embeddings von ChromaDB
13
  embedding_function = embedding_functions.DefaultEmbeddingFunction()
14
 
15
+
 
 
 
16
  #client_llm = Client("wasmdashai/meta-llama-Llama-3.2-3B-Instruct")
17
 
18
  def process_pdf(file):
 
54
  return "\n".join(formatted_results)
55
 
56
  def ask_llm(prompt):
57
+ #client = Client("KingNish/OpenGPT-4o")
58
  result = client_llm.predict(
59
  param_0=prompt,
60
+ #user_prompt=prompt,
61
+ #user_prompt={"text":"","files":[]},
62
+ api_name="/chat"
63
+ )
64
  return result
65
 
66
+
67
  # Erstelle die Gradio-Schnittstelle
68
  with gr.Blocks() as demo:
69
  gr.Markdown("# PDF Upload and Similarity Search with ChromaDB and LLM")