Nfanlo commited on
Commit
486464f
1 Parent(s): 9bbabec

showcase_app to spanish

Browse files
Files changed (1) hide show
  1. showcase_app.py +31 -31
showcase_app.py CHANGED
@@ -15,14 +15,14 @@ from filters import (
15
  remove_urls,
16
  )
17
 
18
- st.sidebar.markdown("## Models loaded")
19
 
20
 
21
- st.title("Sentiment Analys for Spanish Tweets!")
22
  st.write(
23
- "I use the Hugging Face Transformers library to clasify the sentiment \
24
- of tweets passed as input as postive, neutral or negative. \
25
- This app is built using [Streamlit](https://docs.streamlit.io/en/stable/getting_started.html)."
26
  )
27
 
28
  models = [
@@ -34,12 +34,12 @@ models = [
34
  "francisco-perez-sorrosal/dccuchile-distilbert-base-spanish-uncased-finetuned-with-spanish-tweets-clf-cleaned-ds",
35
  ]
36
 
37
- load_all_models = st.checkbox("Load all models?")
38
 
39
  if "pipelines" not in st.session_state:
40
  st.session_state.pipelines = []
41
  for model in models:
42
- with st.spinner(f"Loading model {model}"):
43
  pipe = pipeline(
44
  "text-classification",
45
  model=AutoModelForSequenceClassification.from_pretrained(model),
@@ -47,8 +47,8 @@ if "pipelines" not in st.session_state:
47
  return_all_scores=True,
48
  )
49
  st.sidebar.subheader(pipe.model.config.name_or_path)
50
- st.sidebar.write(f"Tokenizer:\n{pipe.tokenizer}")
51
- st.sidebar.write(f"Model:\n{pipe.model.config}")
52
  st.session_state.pipelines.append(pipe)
53
  if not load_all_models:
54
  break
@@ -63,62 +63,62 @@ def update_model(
63
  if not load_all_models:
64
  if local_model_id:
65
  model_id = local_model_id
66
- st.text(f"Loading model {model_id}")
67
  pipe = pipeline(
68
- "text-classification",
69
  model=AutoModelForSequenceClassification.from_pretrained(model_id),
70
  tokenizer=AutoTokenizer.from_pretrained(model_id),
71
  return_all_scores=True,
72
  )
73
  st.sidebar.subheader(pipe.model.config.name_or_path)
74
- st.sidebar.write(f"Tokenizer:\n{pipe.tokenizer}")
75
- st.sidebar.write(f"Model:\n{pipe.model.config}")
76
  st.session_state.pipelines.append(pipe)
77
  else:
78
  for model in models:
79
- with st.spinner(f"Loading model {model}"):
80
  pipe = pipeline(
81
- "text-classification",
82
  model=AutoModelForSequenceClassification.from_pretrained(model),
83
  tokenizer=AutoTokenizer.from_pretrained(model),
84
  return_all_scores=True,
85
  )
86
  st.sidebar.subheader(pipe.model.config.name_or_path)
87
- st.sidebar.write(f"Tokenizer:\n{pipe.tokenizer}")
88
- st.sidebar.write(f"Model:\n{pipe.model.config}")
89
  st.session_state.pipelines.append(pipe)
90
  st.session_state.last_updated = datetime.datetime.now().time()
91
 
92
 
93
- model_id = st.selectbox(f"Choose a model {load_all_models}", models)
94
- local_model_id = st.text_input(f"Choose a model from local disk")
95
  st.button(
96
- "Load model/s",
97
  on_click=update_model,
98
  args=(
99
  local_model_id,
100
  model_id,
101
  ),
102
  )
103
- st.write("Last Updated = ", st.session_state.last_updated)
104
 
105
 
106
- form = st.form(key="sentiment-form")
107
- tweet_text = form.text_area("Enter your tweet text")
108
- clean_input = form.checkbox("Clean input text?")
109
- ask_chatgpt = form.checkbox("Ask ChatGPT too?")
110
  openai_key = form.text_input("OpenAI Key")
111
- submit = form.form_submit_button("Submit")
112
 
113
  if submit:
114
  if clean_input:
115
  tweet_text = remove_twitter_handles({"text": tweet_text})["text"]
116
  tweet_text = remove_urls({"text": tweet_text})["text"]
117
  tweet_text = clean_html({"text": tweet_text})["text"]
118
- st.write(f"Sending this tweet content to the model: {tweet_text}")
119
 
120
  for classifier in st.session_state.pipelines:
121
- st.subheader(f"Model\n{classifier.model.config.name_or_path}")
122
  result = classifier(tweet_text)
123
  st.json(result, expanded=False)
124
  predictions = result[0]
@@ -130,11 +130,11 @@ if submit:
130
  score = p["score"]
131
 
132
  if label == "P":
133
- st.success(f"{label} sentiment (score: {score})")
134
  elif label == "NEU":
135
- st.warning(f"{label} sentiment (score: {score})")
136
  else:
137
- st.error(f"{label} sentiment (score: {score})")
138
 
139
  if ask_chatgpt:
140
  openai.api_key = openai_key
 
15
  remove_urls,
16
  )
17
 
18
+ st.sidebar.markdown("## Modelos cargados")
19
 
20
 
21
+ st.title("Clasificador de sentimientos para Tweets en Español")
22
  st.write(
23
+ "Utilizo la biblioteca Hugging Face Transformers para clasificar el sentimiento \
24
+ de tweets pasados como entrada como positivos, neutrales o negativos. \
25
+ Esta aplicación está construida usando [Streamlit](https://docs.streamlit.io/en/stable/getting_started.html)."
26
  )
27
 
28
  models = [
 
34
  "francisco-perez-sorrosal/dccuchile-distilbert-base-spanish-uncased-finetuned-with-spanish-tweets-clf-cleaned-ds",
35
  ]
36
 
37
+ load_all_models = st.checkbox("¿Cargar todos los modelos?")
38
 
39
  if "pipelines" not in st.session_state:
40
  st.session_state.pipelines = []
41
  for model in models:
42
+ with st.spinner(f"Cargando modelo {model}"):
43
  pipe = pipeline(
44
  "text-classification",
45
  model=AutoModelForSequenceClassification.from_pretrained(model),
 
47
  return_all_scores=True,
48
  )
49
  st.sidebar.subheader(pipe.model.config.name_or_path)
50
+ st.sidebar.write(f"Tokenizador:\n{pipe.tokenizer}")
51
+ st.sidebar.write(f"Modelo:\n{pipe.model.config}")
52
  st.session_state.pipelines.append(pipe)
53
  if not load_all_models:
54
  break
 
63
  if not load_all_models:
64
  if local_model_id:
65
  model_id = local_model_id
66
+ st.text(f"Cargando modelo {model_id}")
67
  pipe = pipeline(
68
+ "text-classification",
69
  model=AutoModelForSequenceClassification.from_pretrained(model_id),
70
  tokenizer=AutoTokenizer.from_pretrained(model_id),
71
  return_all_scores=True,
72
  )
73
  st.sidebar.subheader(pipe.model.config.name_or_path)
74
+ st.sidebar.write(f"Tokenizador:\n{pipe.tokenizer}")
75
+ st.sidebar.write(f"Modelo:\n{pipe.model.config}")
76
  st.session_state.pipelines.append(pipe)
77
  else:
78
  for model in models:
79
+ with st.spinner(f"Cargando modelo {model}"):
80
  pipe = pipeline(
81
+ "text-classification",
82
  model=AutoModelForSequenceClassification.from_pretrained(model),
83
  tokenizer=AutoTokenizer.from_pretrained(model),
84
  return_all_scores=True,
85
  )
86
  st.sidebar.subheader(pipe.model.config.name_or_path)
87
+ st.sidebar.write(f"Tokenizador:\n{pipe.tokenizer}")
88
+ st.sidebar.write(f"Modelo:\n{pipe.model.config}")
89
  st.session_state.pipelines.append(pipe)
90
  st.session_state.last_updated = datetime.datetime.now().time()
91
 
92
 
93
+ model_id = st.selectbox(f"Elige un modelo {load_all_models}", models)
94
+ local_model_id = st.text_input(f"Elige un modelo del disco local")
95
  st.button(
96
+ "Cargar modelo/s",
97
  on_click=update_model,
98
  args=(
99
  local_model_id,
100
  model_id,
101
  ),
102
  )
103
+ st.write("Última actualización = ", st.session_state.last_updated)
104
 
105
 
106
+ form = st.form(key="sentimient")
107
+ tweet_text = form.text_area("Introduce tu texto")
108
+ clean_input = form.checkbox("¿Limpiar el texto?")
109
+ ask_chatgpt = form.checkbox("¿Preguntarle a ChatGPT?")
110
  openai_key = form.text_input("OpenAI Key")
111
+ submit = form.form_submit_button("Enviar")
112
 
113
  if submit:
114
  if clean_input:
115
  tweet_text = remove_twitter_handles({"text": tweet_text})["text"]
116
  tweet_text = remove_urls({"text": tweet_text})["text"]
117
  tweet_text = clean_html({"text": tweet_text})["text"]
118
+ st.write(f"Enviando este texto al modelo: {tweet_text}")
119
 
120
  for classifier in st.session_state.pipelines:
121
+ st.subheader(f"Modelo\n{classifier.model.config.name_or_path}")
122
  result = classifier(tweet_text)
123
  st.json(result, expanded=False)
124
  predictions = result[0]
 
130
  score = p["score"]
131
 
132
  if label == "P":
133
+ st.success(f"{label} sentimiento (puntuación: {score})")
134
  elif label == "NEU":
135
+ st.warning(f"{label} sentimiento (puntuación: {score})")
136
  else:
137
+ st.error(f"{label} sentimiento (puntuación: {score})")
138
 
139
  if ask_chatgpt:
140
  openai.api_key = openai_key