Titobsala committed
Commit c3a3adb · 1 Parent(s): 63da717

modified the API structure

Files changed (2)
  1. app.py +5 -5
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,16 +1,16 @@
 import streamlit as st
-from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+from transformers import T5Tokenizer, T5ForConditionalGeneration, pipeline
 
 # Replace 'your-username/model-name' with the correct path to your model on the Hugging Face Hub
 MODEL_NAME = "exo-is/t5-small-60M-esg-keyword"
 
 @st.cache_resource
 def load_model():
-    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, use_fast=False)
-    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
-    return pipeline('text-generation', model=model, tokenizer=tokenizer)
+    tokenizer = T5Tokenizer.from_pretrained(MODEL_NAME)
+    model = T5ForConditionalGeneration.from_pretrained(MODEL_NAME)
+    return pipeline('text2text-generation', model=model, tokenizer=tokenizer)
 
-st.title('Text Generator')
+st.title('T5 Text Generator')
 
 gerador = load_model()
 
 
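For context, a minimal usage sketch of the text2text-generation pipeline that load_model() now returns; the widget labels, example input, and max_length value below are assumptions for illustration, not part of this commit.

# Hypothetical usage of the cached pipeline (gerador) defined in app.py above.
texto = st.text_area('Input text', 'The company cut its carbon emissions by 20% in 2023.')
if st.button('Generate keywords'):
    # The pipeline returns a list of dicts with a 'generated_text' field.
    resultado = gerador(texto, max_length=64)
    st.write(resultado[0]['generated_text'])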
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 streamlit==1.25.0
 transformers==4.33.2
 torch==2.0.1
-tokenizers==0.13.3
+tokenizers==0.13.3
+sentencepiece==0.1.99
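The added sentencepiece pin is what lets the slow T5Tokenizer used in app.py load its SentencePiece vocabulary. A quick local check, assuming the model repo is accessible:

# Not part of the commit: verify the tokenizer loads once sentencepiece is installed.
from transformers import T5Tokenizer
tokenizer = T5Tokenizer.from_pretrained("exo-is/t5-small-60M-esg-keyword")
print(tokenizer.tokenize("ESG keyword extraction test"))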