import requests
import gradio as gr
import preprocessor as tweet_cleaner  # the tweet-preprocessor package
# Local alternative: run the classifier in-process with a transformers pipeline.
# from transformers import pipeline
# pretrained_name = "w11wo/indonesian-roberta-base-sentiment-classifier"
# sentiment = pipeline(
#     "sentiment-analysis",
#     model=pretrained_name,
#     tokenizer=pretrained_name,
#     max_length=512,
#     truncation=True,
# )
# Hugging Face Inference API endpoint for the Indonesian RoBERTa sentiment classifier.
API_URL = "https://api-inference.huggingface.co/models/w11wo/indonesian-roberta-base-sentiment-classifier"
# Note: an access token is better kept out of the source, e.g. as a Space secret.
headers = {"Authorization": "Bearer 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"}

def format_sentiment(predictions):
    """Map the API's raw labels to Indonesian display labels keyed by score."""
    formatted_output = dict()
    for p in predictions:
        if p['label'] == 'positive':
            formatted_output['Positif'] = p['score']
        elif p['label'] == 'negative':
            formatted_output['Negatif'] = p['score']
        else:
            formatted_output['Netral'] = p['score']
    return formatted_output

def query(payload):
    """POST the payload to the Inference API and return the decoded JSON response."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

def clean_tweet(tweet):
    """Strip URLs, mentions, and other tweet noise before classification."""
    return tweet_cleaner.clean(tweet)

def get_sentiment(input_text):
    """Clean the input text, query the API, and return label-score pairs."""
    res = query({"inputs": clean_tweet(input_text)})
    return format_sentiment(res[0])
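
# Illustrative round trip (assumed from how the helpers consume the response):
# the Inference API returns a nested list of label scores such as
#   [[{"label": "positive", "score": 0.98},
#     {"label": "neutral", "score": 0.01},
#     {"label": "negative", "score": 0.01}]]
# which get_sentiment() reduces to {"Positif": 0.98, "Netral": 0.01, "Negatif": 0.01}
# for Gradio's 'label' output component to display.
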
examples = list()
examples.append("Semoga saja pelayanan BPJS ke depannya semakin baik. #BPJSKesehatan #TerimaKasihBPJS #BPJSMelayani https://t.co/iDETFSXFJR")
examples.append("min ini mau bayar ko ga bisa yaa m banking sama shopee nya kenapa. Help min udah tenggat nih")
examples.append("Kenaikan harga bpjs yg makin mahal bikin rakyat jadi tambah sengsara pak!")

iface = gr.Interface(
    fn=get_sentiment,
    inputs='text',
    outputs=['label'],
    title='Analisis Sentimen Twitter',
    description="Dapatkan sentimen positif, negatif, atau netral untuk tweet yang dimasukkan.",
    examples=examples,
)
iface.launch(inline=False)
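
# To try the app locally (assuming requests, gradio, and tweet-preprocessor are
# installed), run this file with Python and open the local URL Gradio prints.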