Update handler.py
handler.py (+7, -10)
@@ -8,7 +8,7 @@ tqdm.pandas()
 from datasets import load_dataset
 from transformers import pipeline
 
-class EndpointHandler():
+class InferencePipeline():
     def __init__(self, path=""):
         df_wiki_windows = load_dataset("foxxy-hm/e2eqa-wiki", data_files="processed/wikipedia_20220620_cleaned_v2.csv")["train"].to_pandas()
         df_wiki = load_dataset("foxxy-hm/e2eqa-wiki", data_files="wikipedia_20220620_short.csv")["train"].to_pandas()
@@ -82,14 +82,11 @@ class EndpointHandler():
         return final_answer
 
 
-class InferencePipeline():
+class EndpointHandler():
     def __init__(self):
-        self.endpoint_handler = EndpointHandler()
-
+        self.inference_pipeline = InferencePipeline()
+        pipeline = pipeline("qa-model", model=self.inference_pipeline, tokenizer=None)
+
     def __call__(self, question):
-        answer = self.endpoint_handler.get_answer_e2e(question)
-        return
-
-
-inference_pipeline = InferencePipeline()
-pipeline = pipeline("qa-model", model=inference_pipeline, tokenizer=None)
+        answer = self.endpoint_handler.get_answer_e2e(question)
+        return answer
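For orientation, the sketch below shows the wrapper pattern this change moves toward: the heavy question-answering logic lives in a pipeline class, while EndpointHandler builds it once in __init__ and delegates to it on every call. The _DummyPipeline stand-in, the sample question, and the __main__ driver are illustrative only and not part of handler.py; in the real file InferencePipeline plays that role, and __call__ would need to read the same attribute that __init__ sets (the diff stores self.inference_pipeline but still calls through self.endpoint_handler).

# Minimal sketch of the handler/pipeline split, with a dummy pipeline
# standing in for InferencePipeline (which loads the wiki datasets and models).

class _DummyPipeline:
    """Illustrative stand-in; only mimics get_answer_e2e()."""

    def get_answer_e2e(self, question):
        return f"stub answer for: {question}"


class EndpointHandler:
    def __init__(self):
        # Build the expensive pipeline once, when the endpoint starts.
        self.inference_pipeline = _DummyPipeline()

    def __call__(self, question):
        # Delegate each request to the pipeline created in __init__;
        # the attribute read here must match the one set above.
        return self.inference_pipeline.get_answer_e2e(question)


if __name__ == "__main__":
    handler = EndpointHandler()
    print(handler("What is the capital of Vietnam?"))  # hypothetical input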