Update app.py
app.py CHANGED
@@ -5,19 +5,19 @@ import os
 import json
 import random
 import gradio as gr
+import torch
+import torch.nn as nn
+import torch.optim as optim
+from torch.utils.data import DataLoader, TensorDataset, IterableDataset
 from sklearn.ensemble import IsolationForest, RandomForestClassifier
 from sklearn.model_selection import train_test_split
 from sklearn.preprocessing import OneHotEncoder
 from sklearn.neural_network import MLPClassifier
 from deap import base, creator, tools, algorithms
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
-import torch
-import torch.nn as nn
-import torch.optim as optim
-from torch.utils.data import DataLoader, TensorDataset, IterableDataset
+import gc
 import multiprocessing as mp
 from joblib import Parallel, delayed
-import gc
 
 warnings.filterwarnings('ignore', category=FutureWarning, module='huggingface_hub.file_download')
 
@@ -233,8 +233,10 @@ def generate_text(prompt, max_length=100):
     generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
     return generated_text
 
-
-
+model_name = "distilbert-base-uncased-finetuned-sst-2-english"
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForSequenceClassification.from_pretrained(model_name)
+sentiment_pipeline = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)
 def get_sentiment(text):
     result = sentiment_pipeline(text)[0]
     return f"Sentiment: {result['label']}, Score: {result['score']:.4f}"
@@ -288,3 +290,4 @@ iface = gr.Interface(
 )
 
 iface.launch()
+
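One thing the new module-level sentiment block assumes is AutoModelForSequenceClassification, which the transformers import line shown in this commit does not bring in (it only imports AutoModelForCausalLM, AutoTokenizer, and pipeline). Unless that class is imported elsewhere in app.py, it needs an explicit import. Below is a minimal standalone sketch of the added setup with that import spelled out; the checkpoint name is the one used in the diff, and the example call at the end is purely illustrative.

# Standalone sketch of the sentiment setup added in this commit.
# AutoModelForSequenceClassification is imported explicitly here, since the
# commit's transformers import line does not include it.
from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline

model_name = "distilbert-base-uncased-finetuned-sst-2-english"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
sentiment_pipeline = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)

def get_sentiment(text):
    # The pipeline returns a list of {'label', 'score'} dicts; take the first result.
    result = sentiment_pipeline(text)[0]
    return f"Sentiment: {result['label']}, Score: {result['score']:.4f}"

# Illustrative call; this SST-2 checkpoint is expected to return a POSITIVE label here.
print(get_sentiment("Gradio makes demos easy."))

Loading the model and tokenizer once at module level, as the commit does, keeps the per-request cost of get_sentiment down to a single pipeline call rather than reloading weights on every invocation.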