Bofandra committed on
Commit
e6d25ac
·
verified ·
1 Parent(s): b1aa160

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -5
app.py CHANGED
@@ -23,7 +23,7 @@ def upload_to_hub(local_path, remote_path):
23
 
24
  # Initialize embedder and LLM client
25
  embedder = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
26
- llm = InferenceClient("google/gemma-7b-it", token=os.getenv("HF_TOKEN"))
27
 
28
  DATA_DIR = "data"
29
  os.makedirs(DATA_DIR, exist_ok=True)
@@ -83,10 +83,21 @@ def ask_question(message, history, selected_titles):
83
  D, I = index.search(q_embed, k=3)
84
  context = "\n".join([chunks[i] for i in I[0]])
85
 
86
- prompt = f"Context:\n{context}\n\nQuestion: {message}\nAnswer:"
87
- print(prompt)
88
- response = llm.text_generation(prompt, max_new_tokens=200)
89
- print(response)
 
 
 
 
 
 
 
 
 
 
 
90
  combined_answer += f"**{title}**:\n{response.strip()}\n\n"
91
  except Exception as e:
92
  combined_answer += f"⚠️ Error with {title}: {str(e)}\n\n"
 
23
 
24
  # Initialize embedder and LLM client
25
  embedder = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
26
+ llm = InferenceClient(token=os.getenv("HF_TOKEN"))
27
 
28
  DATA_DIR = "data"
29
  os.makedirs(DATA_DIR, exist_ok=True)
 
83
  D, I = index.search(q_embed, k=3)
84
  context = "\n".join([chunks[i] for i in I[0]])
85
 
86
+ #prompt = f"Context:\n{context}\n\nQuestion: {message}\nAnswer:"
87
+ #print(prompt)
88
+ response = llm.chat_completion(
89
+ messages=[
90
+ {"role": "system", "content": "You are a helpful assistant. Answer based only on the given context."},
91
+ {"role": "user", "content": f"Context:\n{context}\n\nQuestion: {message}"}
92
+ ],
93
+ model="deepseek-ai/DeepSeek-R1-0528",
94
+ max_tokens=200,
95
+ )
96
+
97
+ response = response.choices[0].message["content"]
98
+
99
+ #response = llm.text_generation(prompt, max_new_tokens=200)
100
+ #print(response)
101
  combined_answer += f"**{title}**:\n{response.strip()}\n\n"
102
  except Exception as e:
103
  combined_answer += f"⚠️ Error with {title}: {str(e)}\n\n"