"""Example: text generation with the `josu/gpt-neo-pt-1.3B` checkpoint.

Loads the causal-LM weights and tokenizer once, then builds a
`text-generation` pipeline that reuses those objects (the original
snippet passed the model id to `pipeline`, which re-downloaded and
re-loaded the full 1.3B-parameter model a second time).
"""
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Name the checkpoint once instead of repeating the magic string.
MODEL_ID = "josu/gpt-neo-pt-1.3B"

model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

# Reuse the already-loaded model/tokenizer — avoids a second load.
generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
- Downloads last month: 20
This model does not have enough activity to be deployed to Inference API (serverless) yet. Increase its social
visibility and check back later, or deploy to Inference Endpoints (dedicated)
instead.