# Hugging Face Space: text-to-SQL demo built on the NumbersStation NSQL models.
# (Non-code page residue from the Space's web view removed here.)
from functools import lru_cache

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

from example_strings import example1, example2, example3
# tokenizer6B = AutoTokenizer.from_pretrained(f"NumbersStation/nsql-6B")
# model6B = AutoModelForCausalLM.from_pretrained(f"NumbersStation/nsql-6B")
# tokenizer2B = AutoTokenizer.from_pretrained(f"NumbersStation/nsql-2B")
# model2B = AutoModelForCausalLM.from_pretrained(f"NumbersStation/nsql-2B")
# tokenizer350M = AutoTokenizer.from_pretrained(f"NumbersStation/nsql-2B")
# model350M = AutoModelForCausalLM.from_pretrained(f"NumbersStation/nsql-2B")
@lru_cache(maxsize=1)
def load_model(model_name: str):
    """Load the NSQL tokenizer/model pair for *model_name*.

    Results are memoized (``maxsize=1`` keeps only the most recently used
    pair) so repeated inference requests with the same model do not
    re-download and re-instantiate a multi-gigabyte checkpoint each time.

    Args:
        model_name: One of the NumbersStation repo names, e.g. ``"nsql-6B"``,
            ``"nsql-2B"`` or ``"nsql-350M"``.

    Returns:
        A ``(tokenizer, model)`` tuple from
        ``AutoTokenizer`` / ``AutoModelForCausalLM``.
    """
    tokenizer = AutoTokenizer.from_pretrained(f"NumbersStation/{model_name}")
    model = AutoModelForCausalLM.from_pretrained(f"NumbersStation/{model_name}")
    return tokenizer, model
def infer(input_text, model_choice):
    """Generate a SQL completion for *input_text* using the chosen NSQL model.

    Args:
        input_text: Prompt containing table schemas plus a natural-language
            question (see the examples in the UI).
        model_choice: Model repo name passed through to ``load_model``.

    Returns:
        The decoded generation (special tokens stripped) as a string.
    """
    tokenizer, model = load_model(model_choice)
    encoded = tokenizer(input_text, return_tensors="pt")
    output_ids = model.generate(encoded.input_ids, max_length=500)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
# Markdown blurb shown above the demo UI.
# Fixes vs. original: the nsql-350M link had doubled parentheses
# ("((url))"), which breaks the markdown link, and "let's" (contraction
# of "let us") was used where "lets" (verb) is meant.
description = """The NSQL model family was published by [Numbers Station](https://www.numbersstation.ai/) and is available in three flavors:
- [nsql-6B](https://huggingface.co/NumbersStation/nsql-6B)
- [nsql-2B](https://huggingface.co/NumbersStation/nsql-2B)
- [nsql-350M](https://huggingface.co/NumbersStation/nsql-350M)
This demo lets you choose which one you want to use and provides the three examples you can also find in their model cards.
In general you should first provide the table schemas of the tables you have questions about and then prompt it with a natural language question.
The model will then generate a SQL query that you can run against your database.
"""
# Wire up the demo: free-text prompt + model picker in, generated SQL out.
# (The stray trailing "|" that followed iface.launch() was scrape residue
# and a SyntaxError; it is removed.)
iface = gr.Interface(
    title="Text to SQL with NSQL",
    description=description,
    fn=infer,
    inputs=[
        "text",
        gr.Dropdown(["nsql-6B", "nsql-2B", "nsql-350M"], value="nsql-6B"),
    ],
    outputs="text",
    examples=[
        [example1, "nsql-350M"],
        [example2, "nsql-2B"],
        [example3, "nsql-350M"],
    ],
)

iface.launch()