Update app.py
app.py CHANGED
@@ -185,6 +185,9 @@ def fine_tune_cuad_model():
 # Load NLP Models #
 #############################
 
+# ... (other imports and code remain unchanged)
+
+# Load NLP models
 try:
     try:
         nlp = spacy.load("en_core_web_sm")
@@ -193,8 +196,13 @@ try:
         nlp = spacy.load("en_core_web_sm")
     print("✅ Loading NLP models...")
 
-    summarizer =
-
+    # Updated summarizer: add trust_remote_code=True to load custom model files if needed.
+    summarizer = pipeline(
+        "summarization",
+        model="nsi319/legal-pegasus",
+        trust_remote_code=True,
+        device=0 if torch.cuda.is_available() else -1
+    )
     embedding_model = SentenceTransformer("all-mpnet-base-v2", device=device)
     ner_model = pipeline("ner", model="dslim/bert-base-NER",
                          device=0 if torch.cuda.is_available() else -1)
@@ -203,6 +211,9 @@ try:
                          chunk_length_s=30,
                          device_map="auto" if torch.cuda.is_available() else "cpu")
 
+    # ... (rest of your model loading code remains unchanged)
+
+
     # Load or Fine Tune CUAD QA Model
     if os.path.exists("fine_tuned_legal_qa"):
         print("✅ Loading fine-tuned CUAD QA model from fine_tuned_legal_qa...")