Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,11 +3,24 @@ from transformers import pipeline
|
|
3 |
|
4 |
# Use a pipeline as a high-level helper
|
5 |
|
6 |
-
pipe =
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7 |
|
8 |
# Define the function to answer questions
|
9 |
def get_answer(text):
|
10 |
-
|
|
|
11 |
return result["answer"]
|
12 |
|
13 |
# Define the Gradio interface
|
|
|
3 |
|
4 |
# Use a pipeline as a high-level helper
|
5 |
|
6 |
import random

# List of backend servers (simulated instances of the pipeline).
# NOTE: the original had `pipe = pipeline(...)` as the third element —
# an assignment inside a list literal is a SyntaxError; the assignment
# is removed so the list simply holds three QA pipeline objects.
backend_servers = [
    pipeline("question-answering", model="google-bert/bert-large-uncased-whole-word-masking-finetuned-squad"),
    pipeline("question-answering", model="distilbert-base-uncased-distilled-squad"),
    pipeline("question-answering", model="deepset/roberta-base-squad2"),
]
|
15 |
+
|
16 |
+
# Function to select a backend server at random.
# (The previous comment said "round-robin", but random.choice is uniform
# random selection — the documentation now matches the implementation.)
def select_backend():
    """Return one of the simulated backend QA pipelines, chosen uniformly at random."""
    return random.choice(backend_servers)
|
19 |
|
20 |
# Define the function to answer questions
def get_answer(text):
    """Answer *text* by running it through one of the backend QA pipelines.

    The backend is picked via select_backend(); the question is answered
    against the module-level knowledge base text.
    """
    chosen_pipeline = select_backend()
    # NOTE(review): knowledge_base_text is assumed to be defined elsewhere
    # in app.py — confirm it is set before this function is called.
    answer_data = chosen_pipeline(question=text, context=knowledge_base_text)
    return answer_data["answer"]
|
25 |
|
26 |
# Define the Gradio interface
|