rifatramadhani committed
Commit d1a27ad · 1 Parent(s): b3e0d9a

feat: basic sentiment analysis

Files changed (1):
1. app.py +44 -3
app.py CHANGED
@@ -1,7 +1,48 @@
 import gradio as gr
+import spaces
+import torch
+from transformers import pipeline
+import datetime
+import json
+import logging
 
-def greet(name):
-    return "Hello " + name + "!!"
+model_path = "cardiffnlp/twitter-roberta-base-sentiment-latest"
+# Load model for cache
+sentiment_task = pipeline("sentiment-analysis", model=model_path, tokenizer=model_path)
 
-demo = gr.Interface(fn=greet, inputs="text", outputs="text")
+@spaces.GPU
+def classify(query):
+    torch_device = 0 if torch.cuda.is_available() else -1
+    sentiment_task = pipeline("sentiment-analysis", model=model_path, tokenizer=model_path, device=torch_device)
+
+    request_type = type(query)
+    try:
+        data = json.loads(query)
+        if type(data) != list:
+            data = [query]
+        else:
+            request_type = type(data)
+    except Exception as e:
+        print(e)
+        data = [query]
+        pass
+
+    start_time = datetime.datetime.now()
+
+    result = sentiment_task(data, batch_size=128)
+
+    end_time = datetime.datetime.now()
+    elapsed_time = end_time - start_time
+
+    logging.debug("elapsed predict time: %s", str(elapsed_time))
+    print("elapsed predict time:", str(elapsed_time))
+
+    output = {}
+    output["time"] = str(elapsed_time)
+    output["device"] = torch_device
+    output["result"] = result
+
+    return json.dumps(output)
+
+demo = gr.Interface(fn=classify, inputs="text", outputs="text")
 demo.launch()
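
For reference, a short usage sketch (not part of the commit; the texts, labels, and scores below are illustrative): classify() accepts either a plain string or a JSON-encoded list of strings, runs the batch through the sentiment pipeline, and returns a JSON string with the elapsed time, the device index, and the per-text results. Re-creating the pipeline inside the @spaces.GPU-decorated function lets it pick up the GPU (device 0) when one is allocated, while the module-level pipeline call keeps the model cached at startup, which appears to be the intent of the "# Load model for cache" comment.

# Illustrative sketch, assuming classify() from app.py above is defined in the
# current session and the model has already been downloaded.
import json

# A plain string is wrapped into a one-element batch before classification.
single = json.loads(classify("I love this product!"))
print(single["device"])   # 0 when CUDA is available, otherwise -1
print(single["result"])   # e.g. [{"label": "positive", "score": 0.98}]

# A JSON-encoded list is decoded and classified in one pipeline call (batch_size=128).
texts = ["great service", "terrible weather", "it was okay"]
batch = json.loads(classify(json.dumps(texts)))
print(batch["time"])      # elapsed prediction time as a string
print(batch["result"])    # one {"label": ..., "score": ...} dict per input text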