Update app.py
app.py CHANGED
@@ -12,6 +12,7 @@ setattr(httpcore, 'SyncHTTPTransport', 'AsyncHTTPProxy')
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+model = SentenceTransformer('intfloat/multilingual-e5-large-instruct')
 
 def get_detailed_instruct(task_description: str, query: str) -> str:
     return f'Instruct: {task_description}\nQuery: {query}'
@@ -40,7 +41,6 @@ def respond(
         get_detailed_instruct(task, message)
     ]
 
-    model = SentenceTransformer('intfloat/multilingual-e5-large-instruct')
     query_embeddings = model.encode(queries, convert_to_tensor=True, normalize_embeddings=True)
     scores = (query_embeddings @ encoded_questions.T) * 100
    selected_references['similarity'] = scores.tolist()[0]
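The net effect of the diff is that the SentenceTransformer is constructed once at module scope, next to the InferenceClient, instead of being re-instantiated inside respond() on every call. Below is a minimal runnable sketch of the resulting pattern; the respond() signature, the sample questions, and the dict-like selected_references are simplified stand-ins and not the app's actual interface.

from sentence_transformers import SentenceTransformer

# Loaded once at import time and reused across calls (mirrors the diff).
model = SentenceTransformer('intfloat/multilingual-e5-large-instruct')

def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'

# Hypothetical precomputed reference questions, standing in for the app's data.
questions = ["What is the capital of France?", "How do I reset my password?"]
encoded_questions = model.encode(questions, convert_to_tensor=True, normalize_embeddings=True)

def respond(message: str, task: str, selected_references: dict) -> dict:
    # e5 instruct models expect queries wrapped in an instruct prefix.
    queries = [get_detailed_instruct(task, message)]
    query_embeddings = model.encode(queries, convert_to_tensor=True, normalize_embeddings=True)
    # Dot product of normalized embeddings = cosine similarity, scaled to 0-100.
    scores = (query_embeddings @ encoded_questions.T) * 100
    selected_references['similarity'] = scores.tolist()[0]
    return selected_references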