modifiquei a estrutura da API
Browse files- app.py +5 -5
- requirements.txt +2 -1
app.py
CHANGED
@@ -1,16 +1,16 @@
|
|
1 |
import streamlit as st
|
2 |
-
from transformers import
|
3 |
|
4 |
# Substitua 'seu-nome-de-usuário/nome-do-modelo' pelo caminho correto do seu modelo no Hugging Face Hub
|
5 |
MODEL_NAME = "exo-is/t5-small-60M-esg-keyword"
|
6 |
|
7 |
@st.cache_resource
|
8 |
def load_model():
|
9 |
-
tokenizer =
|
10 |
-
model =
|
11 |
-
return pipeline('
|
12 |
|
13 |
-
st.title('Gerador de Texto')
|
14 |
|
15 |
gerador = load_model()
|
16 |
|
|
|
1 |
import streamlit as st
|
2 |
+
from transformers import T5Tokenizer, T5ForConditionalGeneration, pipeline
|
3 |
|
4 |
# Replace 'your-username/model-name' with the correct path to your model on the Hugging Face Hub
MODEL_NAME = "exo-is/t5-small-60M-esg-keyword"
|
6 |
|
7 |
@st.cache_resource
def load_model():
    """Build a text2text-generation pipeline for MODEL_NAME.

    Loads the T5 tokenizer and model from the Hugging Face Hub and wraps
    them in a transformers pipeline. Decorated with st.cache_resource so
    the (slow) download/initialization runs only once per Streamlit session.
    """
    tok = T5Tokenizer.from_pretrained(MODEL_NAME)
    mdl = T5ForConditionalGeneration.from_pretrained(MODEL_NAME)
    return pipeline('text2text-generation', model=mdl, tokenizer=tok)
|
12 |
|
13 |
+
# Page title (user-facing, Portuguese: "T5 Text Generator").
st.title('Gerador de Texto T5')

# Cached generation pipeline; first call triggers the model download.
gerador = load_model()
|
16 |
|
requirements.txt
CHANGED
@@ -1,4 +1,5 @@
|
|
1 |
streamlit==1.25.0
|
2 |
transformers==4.33.2
|
3 |
torch==2.0.1
|
4 |
-
tokenizers==0.13.3
|
|
|
|
1 |
streamlit==1.25.0
|
2 |
transformers==4.33.2
|
3 |
torch==2.0.1
|
4 |
+
tokenizers==0.13.3
|
5 |
+
sentencepiece==0.1.99
|