PRNKPS committed on
Commit ae01bd2 · 1 Parent(s): 4196add

Update app.py

Files changed (1)
  1. app.py +18 -14
app.py CHANGED
@@ -1,17 +1,21 @@
- import streamlit as st
- from transformers import pipeline
-
- classifier = pipeline("translation", model="t5-small")
- def main():
-     st.title("English to German")
-
-     with st.form("text_field"):
-         text = st.text_area('enter some english word:')
-         # clicked==True only when the button is clicked
-         clicked = st.form_submit_button("Submit")
-         if clicked:
-             results = classifier([text])
-             st.json(results)
-
- if __name__ == "__main__":
-     main()
+ import torch
+ from transformers import AutoTokenizer, AutoModelForQuestionAnswering
+
+ tokenizer = AutoTokenizer.from_pretrained("valhalla/longformer-base-4096-finetuned-squadv1")
+ model = AutoModelForQuestionAnswering.from_pretrained("valhalla/longformer-base-4096-finetuned-squadv1")
+
+ text = "Huggingface has democratized NLP. Huge thanks to Huggingface for this."
+ question = "What has Huggingface done ?"
+ encoding = tokenizer(question, text, return_tensors="pt")
+ input_ids = encoding["input_ids"]
+
+ # default is local attention everywhere
+ # the forward method will automatically set global attention on question tokens
+ attention_mask = encoding["attention_mask"]
+
+ start_scores, end_scores = model(input_ids, attention_mask=attention_mask, return_dict=False)
+ all_tokens = tokenizer.convert_ids_to_tokens(input_ids[0].tolist())
+
+ answer_tokens = all_tokens[torch.argmax(start_scores) : torch.argmax(end_scores) + 1]
+ answer = tokenizer.decode(tokenizer.convert_tokens_to_ids(answer_tokens))
+ # output => democratized NLP
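
Note (not part of this commit): the comments in the new app.py say the forward pass sets global attention on the question tokens automatically when no global_attention_mask is passed. A minimal sketch of doing the same thing explicitly, reusing the tokenizer, model, input_ids, and attention_mask defined above (sep_index is just an illustrative variable, and the mask below only mirrors the idea, not the exact internal logic), could look like this:

# Illustrative only, not part of app.py: set Longformer's global attention explicitly
# instead of relying on the automatic behaviour described in the comments above.
import torch

global_attention_mask = torch.zeros_like(input_ids)
# index of the first </s> token, which ends the question segment
sep_index = (input_ids[0] == tokenizer.sep_token_id).nonzero()[0].item()
global_attention_mask[0, :sep_index] = 1  # question tokens get global attention

start_scores, end_scores = model(
    input_ids,
    attention_mask=attention_mask,
    global_attention_mask=global_attention_mask,
    return_dict=False,
)

The answer-decoding lines in app.py work unchanged with these scores.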