Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -11,6 +11,7 @@ from transformers import pipeline
|
|
11 |
from datasets import load_dataset
|
12 |
import fitz # PyMuPDF
|
13 |
|
|
|
14 |
dataset = load_dataset("ibunescu/qa_legal_dataset_train")
|
15 |
|
16 |
# Different pipelines for different tasks
|
@@ -18,6 +19,7 @@ qa_pipeline = pipeline("question-answering", model="deepset/roberta-base-squad2"
|
|
18 |
summarization_pipeline = pipeline("summarization", model="facebook/bart-large-cnn")
|
19 |
mask_filling_pipeline = pipeline("fill-mask", model="nlpaueb/legal-bert-base-uncased")
|
20 |
|
|
|
21 |
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
22 |
|
23 |
def respond(
|
@@ -69,7 +71,7 @@ def generate_case_outcome(prosecutor_response, defense_response):
|
|
69 |
|
70 |
def determine_winner(outcome):
|
71 |
if "Prosecutor" in outcome and "Defense" in outcome:
|
72 |
-
if
|
73 |
return "Prosecutor Wins"
|
74 |
else:
|
75 |
return "Defense Wins"
|
@@ -192,7 +194,7 @@ def update_pdf_gallery_and_extract_text(pdf_files):
|
|
192 |
return pdf_files, pdf_text
|
193 |
|
194 |
def get_top_10_cases():
|
195 |
-
result = summarization_pipeline("
|
196 |
return result[0]['summary_text']
|
197 |
|
198 |
def add_message(history, message):
|
@@ -281,6 +283,14 @@ with gr.Blocks(css=custom_css) as demo:
|
|
281 |
submit_btn.click(chat_between_bots, inputs=[system_message1, system_message2, max_tokens, temperature, top_p, history1, history2, shared_history, message], outputs=[prosecutor_response, defense_response, history1, history2, shared_argument, winner])
|
282 |
clear_btn.click(reset_conversation, outputs=[history1, history2, shared_history, prosecutor_response, defense_response, shared_argument, winner])
|
283 |
save_btn.click(save_conversation, inputs=[history1, history2, shared_history], outputs=[history1, history2, shared_history])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
284 |
|
285 |
with gr.Tab("PDF Management"):
|
286 |
pdf_upload = gr.File(label="Upload Case Files (PDF)", file_types=[".pdf"])
|
@@ -309,13 +319,6 @@ with gr.Blocks(css=custom_css) as demo:
|
|
309 |
bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])
|
310 |
|
311 |
chatbot.like(print_like_dislike, None, None)
|
312 |
-
|
313 |
-
with gr.Tab("Case Outcome Chat"):
|
314 |
-
case_question = gr.Textbox(label="Ask a Question about the Case Outcome")
|
315 |
-
case_answer = gr.Textbox(label="Answer", interactive=False, elem_classes=["scroll-box"])
|
316 |
-
ask_case_btn = gr.Button("Ask")
|
317 |
-
|
318 |
-
ask_case_btn.click(ask_about_case_outcome, inputs=[shared_history, case_question], outputs=case_answer)
|
319 |
|
320 |
demo.queue()
|
321 |
demo.launch()
|
|
|
11 |
from datasets import load_dataset
|
12 |
import fitz # PyMuPDF
|
13 |
|
14 |
+
# Loading datasets
|
15 |
dataset = load_dataset("ibunescu/qa_legal_dataset_train")
|
16 |
|
17 |
# Different pipelines for different tasks
|
|
|
19 |
summarization_pipeline = pipeline("summarization", model="facebook/bart-large-cnn")
|
20 |
mask_filling_pipeline = pipeline("fill-mask", model="nlpaueb/legal-bert-base-uncased")
|
21 |
|
22 |
+
# Inference client for chat completion
|
23 |
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
24 |
|
25 |
def respond(
|
|
|
71 |
|
72 |
def determine_winner(outcome):
    """Decide which side won based on mentions in the generated outcome text.

    Parameters:
        outcome (str): free-form text produced by the case-outcome generator.

    Returns:
        str: "Prosecutor Wins", "Defense Wins", or "Inconclusive" when the
        text names neither side. (The original implicitly returned None in
        that case, which left the winner display blank.)
    """
    prosecutor_mentioned = "Prosecutor" in outcome
    defense_mentioned = "Defense" in outcome
    if prosecutor_mentioned and defense_mentioned:
        # Both sides appear: whichever is mentioned more often wins;
        # ties go to the Defense (preserves the original else-branch).
        if outcome.count("Prosecutor") > outcome.count("Defense"):
            return "Prosecutor Wins"
        else:
            return "Defense Wins"
    # Only one side named in the text — that side wins outright.
    if prosecutor_mentioned:
        return "Prosecutor Wins"
    if defense_mentioned:
        return "Defense Wins"
    # Neither side named: report explicitly instead of returning None.
    return "Inconclusive"
|
|
|
194 |
return pdf_files, pdf_text
|
195 |
|
196 |
def get_top_10_cases():
    """Return model-generated text listing notable appellate cases.

    Feeds a fixed prompt to the module-level ``summarization_pipeline``
    (BART, facebook/bart-large-cnn) and returns the resulting summary
    string. NOTE(review): a summarization model is an odd fit for an
    open-ended "list cases" request — output quality is unverified.
    """
    prompt = "Show 10 legal case with names and numbers from any State Court of Appeals in the US."
    generated = summarization_pipeline(
        prompt,
        max_length=150,
        min_length=50,
        do_sample=False,
    )
    return generated[0]["summary_text"]
|
199 |
|
200 |
def add_message(history, message):
|
|
|
283 |
submit_btn.click(chat_between_bots, inputs=[system_message1, system_message2, max_tokens, temperature, top_p, history1, history2, shared_history, message], outputs=[prosecutor_response, defense_response, history1, history2, shared_argument, winner])
|
284 |
clear_btn.click(reset_conversation, outputs=[history1, history2, shared_history, prosecutor_response, defense_response, shared_argument, winner])
|
285 |
save_btn.click(save_conversation, inputs=[history1, history2, shared_history], outputs=[history1, history2, shared_history])
|
286 |
+
|
287 |
+
# UI widgets for asking a question about the case outcome
|
288 |
+
with gr.Row():
|
289 |
+
case_question = gr.Textbox(label="Ask a Question about the Case Outcome")
|
290 |
+
case_answer = gr.Textbox(label="Answer", interactive=False, elem_classes=["scroll-box"])
|
291 |
+
ask_case_btn = gr.Button("Ask")
|
292 |
+
|
293 |
+
ask_case_btn.click(ask_about_case_outcome, inputs=[shared_history, case_question], outputs=case_answer)
|
294 |
|
295 |
with gr.Tab("PDF Management"):
|
296 |
pdf_upload = gr.File(label="Upload Case Files (PDF)", file_types=[".pdf"])
|
|
|
319 |
bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])
|
320 |
|
321 |
chatbot.like(print_like_dislike, None, None)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
322 |
|
323 |
demo.queue()
|
324 |
demo.launch()
|