Commit 45aab27 by wzkariampuzha: Update epi_pipeline.py
Parent(s): 9d363f8
Files changed: epi_pipeline.py (+8 -4)
@@ -224,9 +224,11 @@ def streamlit_getAbs(searchterm_list:Union[List[str],List[int],str], maxResults:
 import os
 os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
 from tensorflow.keras.preprocessing.sequence import pad_sequences
+from tensorflow.keras.preprocessing.text import tokenizer_from_json
 import tensorflow as tf
 import numpy as np
 import spacy
+import json
 
 class Classify_Pipeline:
     def __init__(self,model:str='LSTM_RNN_Model'):
@@ -235,9 +237,12 @@ class Classify_Pipeline:
         self.nlpSci = spacy.load("en_ner_bc5cdr_md")
         self.nlpSci2 = spacy.load('en_ner_bionlp13cg_md')
         # load the tokenizer
-        with open(model+'/tokenizer.pickle', 'rb') as handle:
-            import pickle
-            self.classify_tokenizer = pickle.load(handle)
+        with open('tokenizer.json') as f:
+            self.classify_tokenizer = tokenizer_from_json(json.load(f))
+        #OLD Code - used pickle which is unsafe
+        #with open(model+'/tokenizer.pickle', 'rb') as handle:
+        #    import pickle
+        #    self.classify_tokenizer = pickle.load(handle)
         # Defaults to load my_model_orphanet_final, the most up-to-date version of the classification model,
         # but can also be run on any other tf.keras model
 
@@ -450,7 +455,6 @@ import re
 from transformers import BertConfig, AutoModelForTokenClassification, BertTokenizer, Trainer
 from unidecode import unidecode
 from collections import OrderedDict
-import json
 import pandas as pd
 from more_itertools import pairwise
 
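For context (not part of this commit), a minimal one-time migration sketch: the new loading code in Classify_Pipeline.__init__ expects a tokenizer.json file that, when read with json.load(f), yields the JSON string accepted by tokenizer_from_json. Assuming the legacy pickled Keras tokenizer referenced in the commented-out code is still available locally, it could be re-serialized once as shown below; the path and variable names are illustrative.

    import json
    import pickle

    # Load the legacy tokenizer from the old pickle-based format
    # (assumed path, taken from the commented-out code above).
    with open('LSTM_RNN_Model/tokenizer.pickle', 'rb') as handle:
        legacy_tokenizer = pickle.load(handle)

    # Tokenizer.to_json() returns a JSON string; json.dump() stores that string
    # as a JSON document, so json.load(f) in __init__ recovers the exact string
    # that tokenizer_from_json() expects.
    with open('tokenizer.json', 'w') as f:
        json.dump(legacy_tokenizer.to_json(), f)

After this conversion, Classify_Pipeline() can load the tokenizer without importing pickle at runtime.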