"""SQLBot: generate SQL commands from natural-language queries with a
seq2seq model, then wrap the result in a conversational response."""

import nltk  # NOTE(review): nltk is only used for the punkt download below;
             # nothing in this script tokenizes with it — confirm it is needed.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Download sentence-tokenizer data (kept for compatibility even though this
# script never calls nltk directly).
nltk.download('punkt')

# NOTE(review): "microsoft/OmniParser" on the Hugging Face hub is a
# screen-parsing vision model, not a text-to-SQL seq2seq checkpoint —
# AutoModelForSeq2SeqLM.from_pretrained will likely fail on it. Verify the
# intended model (e.g. a T5-based text-to-SQL checkpoint).
model_name = "microsoft/OmniParser"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# (keyword, response template) pairs, checked IN ORDER against the lowercased
# query; the first match wins. Order is significant and mirrors the original
# if/elif chain (e.g. "select" is tested before "show tables"). "{sql}" is
# substituted with the generated SQL command.
_RESPONSE_TEMPLATES = [
    ("create table", "Alright, rolling up my sleeves to create that table for you! Here it is:\n{sql}"),
    ("select", "Got it! Fetching the data you need:\n{sql}"),
    ("show tables", "Let me show you all the tables you've got:\n{sql}"),
    ("insert", "Great! Adding new records as requested:\n{sql}"),
    ("update", "Time to make some updates! Here you go:\n{sql}"),
    ("delete", "Okay, we're deleting those records:\n{sql}"),
]
_DEFAULT_TEMPLATE = "Here's what I found:\n{sql}"


# Name kept as-is ("omnparser" typo included) for backward compatibility
# with any existing callers.
def generate_sql_with_omnparser(query):
    """Generate a SQL command for *query* using the loaded seq2seq model.

    Parameters
    ----------
    query : str
        Natural-language request to translate into SQL.

    Returns
    -------
    str
        The decoded model output with special tokens stripped.
    """
    # Bug fix: tokenizer.encode() returns only input_ids; calling the
    # tokenizer directly also yields the attention_mask, which generate()
    # needs to avoid ambiguous-padding behavior (and its warning).
    inputs = tokenizer(query, return_tensors="pt")
    outputs = model.generate(**inputs, max_length=50)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


def sqlbot(query):
    """Return a friendly, personality-flavored response containing the
    generated SQL command for *query*.

    Parameters
    ----------
    query : str
        Natural-language request; keyword matching on its lowercased form
        selects the response flavor.

    Returns
    -------
    str
        Conversational message with the generated SQL appended.
    """
    sql_command = generate_sql_with_omnparser(query)
    lowered = query.lower()
    for keyword, template in _RESPONSE_TEMPLATES:
        if keyword in lowered:
            return template.format(sql=sql_command)
    return _DEFAULT_TEMPLATE.format(sql=sql_command)


def _main():
    """Run the demo queries (side effects: model inference + stdout)."""
    for user_query in (
        "Create table employees with name age department",
        "Insert into users (name, age) values ('Alice', 30)",
    ):
        print(sqlbot(user_query))


if __name__ == "__main__":
    # Bug fix: the original executed these examples at import time; guarding
    # them means importing this module no longer triggers model inference.
    _main()