manu committed on
Commit eb2ce3a · 1 Parent(s): 26ed9a4

more fixes!

Files changed (2)
  1. app.py +7 -4
  2. requirements.txt +1 -1
app.py CHANGED
@@ -1,12 +1,13 @@
 import numpy as np
 import gradio as gr
-from transformers import TFAutoModelForSequenceClassification, AutoTokenizer
+import torch
+from transformers import AutoModelForSequenceClassification, AutoTokenizer
 
 checkpoint="MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli"
 
 
 tokenizer = AutoTokenizer.from_pretrained(checkpoint)
-model=TFAutoModelForSequenceClassification.from_pretrained(checkpoint,from_pt=True)
+model=AutoModelForSequenceClassification.from_pretrained(checkpoint)
 
 title = "The Seagull story"
 description = """
@@ -64,8 +65,10 @@ def generate_tone(index,question):
 """.replace("\n","")
 
 ]
-inputs=tokenizer(passages[index],question, return_tensors="tf")
-return (model(**inputs).logits)
+input = tokenizer(passages[index], question, truncation=True, return_tensors="pt")
+output = model(input["input_ids"].to("cpu")) # device = "cuda:0" or "cpu"
+prediction = torch.softmax(output["logits"][0], -1).tolist()
+return prediction
 
 passages=["General","Pier","Boat","Island"]
 
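In short: the commit swaps the TensorFlow model class for its PyTorch counterpart and makes generate_tone return softmax probabilities instead of raw logits. Below is a minimal, self-contained sketch of the inference path the updated app.py follows; it is not the commit's exact code — the premise/hypothesis strings are placeholders for passages[index] and the user's question, and the label names are read from the checkpoint's config rather than assumed.

# Sketch only (not part of the commit): zero-shot NLI with the PyTorch checkpoint.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

checkpoint = "MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)

premise = "The old man walked to the end of the pier."  # placeholder for passages[index]
hypothesis = "Was he near the water?"                    # placeholder for the question

inputs = tokenizer(premise, hypothesis, truncation=True, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, num_labels)

probs = torch.softmax(logits[0], -1).tolist()
# Map probabilities to label names via the model config instead of hard-coding the order.
print({model.config.id2label[i]: round(p, 3) for i, p in enumerate(probs)})

Returning probabilities rather than raw logits keeps the Gradio output readable, and dropping the from_pt=True conversion (and with it the TensorFlow dependency) is what the requirements.txt change below reflects.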
requirements.txt CHANGED
@@ -1,3 +1,3 @@
 transformers
 sentencepiece
-tensorflow
+torch