Update app.py
app.py CHANGED
@@ -15,14 +15,15 @@ def get_top_chunks(query):
     query_embedding = model.encode(query, convert_to_tensor=True)
     query_embedding_normalized = query_embedding / query_embedding.norm()
     similarities = torch.matmul(chunk_embeddings, query_embedding_normalized)
-    top_indices = torch.topk(similarities, k=
+    top_indices = torch.topk(similarities, k=20).indices.tolist()
     return [cleaned_chunks[i] for i in top_indices]
 
 client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")
 
 def respond(message, history, name, char_class, char_alignment, char_race, campaign, party):
+
     response = ""
-    top_chunks = get_top_chunks(message)
+    top_chunks = get_top_chunks(f"{char_class}, {char_alignment}, {char_race}, {campaign},{party},{message}")
     context = "\n".join(top_chunks)
 
     messages = [
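This commit replaces the incomplete `torch.topk` call with one that takes the top 20 chunks and converts the result to a plain index list, and it widens the retrieval query in `respond` so that the character's class, alignment, race, campaign, and party are embedded alongside the chat message rather than the message alone.

For reference, a minimal runnable sketch of the retrieval helper as it stands after this change. The embedding model name and the placeholder chunks are assumptions for illustration; in app.py, `model`, `cleaned_chunks`, and `chunk_embeddings` are defined earlier, outside this hunk.

import torch
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("all-MiniLM-L6-v2")  # assumed embedding model
cleaned_chunks = ["chunk one ...", "chunk two ..."]  # placeholder corpus

# Pre-normalize the chunk embeddings so the dot product below is a
# cosine similarity.
chunk_embeddings = model.encode(cleaned_chunks, convert_to_tensor=True)
chunk_embeddings = chunk_embeddings / chunk_embeddings.norm(dim=1, keepdim=True)

def get_top_chunks(query, k=20):
    query_embedding = model.encode(query, convert_to_tensor=True)
    query_embedding_normalized = query_embedding / query_embedding.norm()
    # Cosine similarity of the query against every chunk.
    similarities = torch.matmul(chunk_embeddings, query_embedding_normalized)
    # Guard added for this sketch: k may not exceed the corpus size.
    k = min(k, len(cleaned_chunks))
    top_indices = torch.topk(similarities, k=k).indices.tolist()
    return [cleaned_chunks[i] for i in top_indices]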