Towhidul committed (verified)
Commit f980ac6 · 1 Parent(s): c963472

Update app.py

Files changed (1): app.py (+7 -5)

app.py CHANGED
@@ -32,8 +32,8 @@ input = tokenizer(premise, hypothesis, truncation=True, return_tensors="pt")
 output = model(input["input_ids"].to(device)) # device = "cuda:0" or "cpu"
 prediction = torch.softmax(output["logits"][0], -1).tolist()
 label_names = ["support", "neutral", "refute"]
-prediction = {name: round(float(pred) * 100, 1) for pred, name in zip(prediction, label_names)}
-print(prediction)
+prediction = {name: float(pred) for pred, name in zip(prediction, label_names)}
+highest_label = max(prediction, key=prediction.get)


 from transformers import pipeline
@@ -69,14 +69,16 @@ def extract_person_names(sentence):
 person_name1 = extract_person_names(selected_sentence1)
 person_name2 = extract_person_names(selected_sentence2)

-st.write("Result:", prediction)

 col1, col2 = st.columns(2)

 with col1:
-    st.write("Without Factual Entailment:",prediction)
+    st.write("Without Factual Entailment.")
+    st.write("Textual Entailment Model:\n",highest_label)

 with col2:
-    st.write("Factual Entailment:",labels)
+    st.write("With Factual Entailment:")
+    st.write("Textual Entailment Model:\n",labels)
+    st.write("Span Detection Model:\n")
 st.write(f"{person_name1}::{person_name2}")
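For reference, below is a minimal, self-contained sketch of how the entailment step reads after this commit. The checkpoint name, device selection, and the example premise/hypothesis are assumptions for illustration only; this diff does not show how `tokenizer`, `model`, `premise`, or `hypothesis` are actually created in app.py.

```python
# Minimal sketch of the post-commit entailment step -- NOT the app's exact setup.
# The checkpoint, device handling, and inputs below are placeholder assumptions.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Assumption: any 3-way NLI checkpoint; app.py's actual model is not shown in this diff.
model_name = "MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli"
device = "cuda:0" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name).to(device)

premise = "Alice founded the company in 2010."    # illustrative input
hypothesis = "The company was founded by Alice."  # illustrative input

# Mirrors the changed lines: tokenize the pair, run the model, softmax the logits.
input = tokenizer(premise, hypothesis, truncation=True, return_tensors="pt")
output = model(input["input_ids"].to(device))  # device = "cuda:0" or "cpu"
prediction = torch.softmax(output["logits"][0], -1).tolist()

label_names = ["support", "neutral", "refute"]
# New behaviour: keep raw probabilities (the old code rounded them to percentages) ...
prediction = {name: float(pred) for pred, name in zip(prediction, label_names)}
# ... and report only the top-scoring label instead of the whole dict.
highest_label = max(prediction, key=prediction.get)

print(highest_label, round(prediction[highest_label], 3))
```

`max(prediction, key=prediction.get)` returns the label with the highest probability. In the UI, the left column ("Without Factual Entailment") now shows only this label, while the right column shows the factual-entailment `labels` and the span-detection output produced elsewhere in app.py.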