adityasrathore committed on
Commit 81adca3
1 Parent(s): 49c39ca

Update app.py

Files changed (1)
  1. app.py +62 -0
app.py CHANGED
@@ -1,3 +1,65 @@
+import os
+os.system("pip install gradio==3.0.18")
+from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification, AutoModelForTokenClassification
+import gradio as gr
+import spacy
+nlp = spacy.load('en_core_web_sm')
+nlp.add_pipe('sentencizer')
+
+def split_in_sentences(text):
+    doc = nlp(text)
+    return [str(sent).strip() for sent in doc.sents]
+
+def make_spans(text, results):
+    results_list = []
+    for i in range(len(results)):
+        results_list.append(results[i]['label'])
+    facts_spans = []
+    facts_spans = list(zip(split_in_sentences(text), results_list))
+    return facts_spans
+
+auth_token = os.environ.get("HF_Token")
+
+## Speech Recognition
+asr = pipeline("automatic-speech-recognition", "facebook/wav2vec2-base-960h")
+def transcribe(audio):
+    text = asr(audio)["text"]
+    return text
+def speech_to_text(speech):
+    text = asr(speech)["text"]
+    return text
+
+## Summarization
+summarizer = pipeline("summarization", model="knkarthick/MEETING_SUMMARY")
+def summarize_text(text):
+    resp = summarizer(text)
+    stext = resp[0]['summary_text']
+    return stext
+
+## Fiscal Tone Analysis
+fin_model = pipeline("sentiment-analysis", model='yiyanghkust/finbert-tone', tokenizer='yiyanghkust/finbert-tone')
+def text_to_sentiment(text):
+    sentiment = fin_model(text)[0]["label"]
+    return sentiment
+
+## Company Extraction
+def fin_ner(text):
+    api = gr.Interface.load("dslim/bert-base-NER", src='models', use_auth_token=auth_token)
+    replaced_spans = api(text)
+    return replaced_spans
+
+## Fiscal Sentiment by Sentence
+def fin_ext(text):
+    results = fin_model(split_in_sentences(text))
+    return make_spans(text, results)
+
+## Forward Looking Statement
+def fls(text):
+    # fls_model = pipeline("text-classification", model="yiyanghkust/finbert-fls", tokenizer="yiyanghkust/finbert-fls")
+    fls_model = pipeline("text-classification", model="demo-org/finbert_fls", tokenizer="demo-org/finbert_fls", use_auth_token=auth_token)
+    results = fls_model(split_in_sentences(text))
+    return make_spans(text, results)
+
 demo = gr.Interface()
 
 with demo:
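
Note: the diff stops at the opening of the `with demo:` block, so the actual UI wiring is not part of this commit. Below is a minimal illustrative sketch of how the functions defined above (speech_to_text, summarize_text, text_to_sentiment, fin_ner, fin_ext, fls) could be hooked into a Gradio 3.x layout, assuming the sketch is appended to the code above so those names are in scope. The component choices, labels, and layout are assumptions, not taken from this repository; the sketch also uses gr.Blocks() rather than the bare gr.Interface() shown in the context lines, since Blocks is the Gradio 3.x container normally used with a `with demo:` block.

    # Illustrative sketch only: the real contents of the `with demo:` block are
    # not shown in this diff. Assumes the functions defined above are in scope;
    # component names and layout here are hypothetical.
    import gradio as gr

    with gr.Blocks() as demo:
        gr.Markdown("## Earnings-call analysis (illustrative wiring)")
        with gr.Row():
            audio_file = gr.Audio(source="microphone", type="filepath")
            transcript = gr.Textbox(label="Transcript", lines=10)
        with gr.Row():
            transcribe_btn = gr.Button("Transcribe")
            summarize_btn = gr.Button("Summarize")
        summary = gr.Textbox(label="Summary")
        tone = gr.Label(label="Overall fiscal tone")
        sentence_tone = gr.HighlightedText(label="Fiscal sentiment by sentence")
        fls_spans = gr.HighlightedText(label="Forward-looking statements")
        entities = gr.HighlightedText(label="Company / entity extraction")

        # Route each component into the corresponding pipeline function defined above.
        transcribe_btn.click(speech_to_text, inputs=audio_file, outputs=transcript)
        summarize_btn.click(summarize_text, inputs=transcript, outputs=summary)
        gr.Button("Classify tone").click(text_to_sentiment, inputs=summary, outputs=tone)
        gr.Button("Sentiment by sentence").click(fin_ext, inputs=transcript, outputs=sentence_tone)
        gr.Button("Forward-looking statements").click(fls, inputs=transcript, outputs=fls_spans)
        gr.Button("Extract entities").click(fin_ner, inputs=transcript, outputs=entities)

    demo.launch()

Each button simply routes the transcript (or audio) component into one of the pipeline functions, which is the usual Blocks pattern for this kind of multi-step demo.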