from transformers import AutoTokenizer, AutoModelForCausalLM


def load_model(model_name="premai-io/prem-1B-SQL"):
    """
    Loads the SQL generation model and tokenizer from Hugging Face.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model


def generate_sql(question, prompt_inputs, tokenizer, model, device="cpu"):
    """
    Generates an SQL query based on the question and schema.
    The question is expected to already be embedded in
    prompt_inputs["formatted_prompt"].
    """
    prompt = prompt_inputs["formatted_prompt"]
    # Keep the model and inputs on the same device so generation also works off-CPU.
    model = model.to(device)
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    outputs = model.generate(
        **inputs,
        max_new_tokens=128,
    )
    # Decode only the newly generated tokens, skipping the echoed prompt.
    generated_tokens = outputs[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(generated_tokens, skip_special_tokens=True)
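

# --- Example usage (illustrative sketch) -----------------------------------
# Minimal sketch of how the two helpers above fit together. The schema,
# question, and prompt layout below are assumptions for illustration; the
# only contract this module relies on is that prompt_inputs carries the fully
# formatted prompt under the "formatted_prompt" key.
if __name__ == "__main__":
    tokenizer, model = load_model()

    question = "How many customers placed an order in 2023?"
    schema = (
        "CREATE TABLE customers (id INTEGER PRIMARY KEY, name TEXT);\n"
        "CREATE TABLE orders (id INTEGER PRIMARY KEY, customer_id INTEGER, "
        "order_date DATE);"
    )
    # Hypothetical prompt construction; adapt to the template the model expects.
    prompt_inputs = {
        "formatted_prompt": (
            f"### Schema:\n{schema}\n\n"
            f"### Question:\n{question}\n\n"
            "### SQL:\n"
        )
    }

    sql = generate_sql(question, prompt_inputs, tokenizer, model, device="cpu")
    print(sql)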