# BootyShakerAI / inference.py
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the fine-tuned checkpoint once at import time rather than on every call.
MODEL_DIR = "./results"
model = AutoModelForCausalLM.from_pretrained(MODEL_DIR)
tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)


def generate_text(prompt, max_length=100):
    """Generate a continuation of `prompt` with nucleus sampling."""
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():  # inference only, so skip gradient tracking
        outputs = model.generate(
            **inputs,
            max_length=max_length,   # total length in tokens, prompt included
            num_return_sequences=1,
            temperature=0.7,         # soften the next-token distribution
            top_p=0.9,               # nucleus sampling cutoff
            do_sample=True,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
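

# A minimal usage sketch. It assumes "./results" holds a causal LM checkpoint
# saved by an earlier training run; the prompt below is an arbitrary example.
if __name__ == "__main__":
    print(generate_text("Once upon a time", max_length=50))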