# Chagrin / app.py
import gradio as gr
import torch
import dask.dataframe as dd
from transformers import AutoModel, AutoModelForSequenceClassification, AutoTokenizer

# Load models and tokenizer
def load_models():
    # Model 1: red-team assistant checkpoint
    model_1 = AutoModel.from_pretrained("Canstralian/RedTeamAI")

    # Model 2: NOTE this repo ships GGUF weights, which AutoModel cannot load
    # directly; see the hedged loading sketch below this function.
    model_2 = AutoModel.from_pretrained("mradermacher/BashCopilot-6B-preview-GGUF")

    # Tokenizer and fine-tuned BERT sequence-classification model
    tokenizer = AutoTokenizer.from_pretrained("bash1130/bert-base-finetuned-ynat")
    model_3 = AutoModelForSequenceClassification.from_pretrained("bash1130/bert-base-finetuned-ynat")

    return model_1, model_2, tokenizer, model_3
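
# Hedged sketch (assumption, not from the original file): GGUF checkpoints are
# normally run with llama.cpp-compatible runtimes, but recent transformers
# releases can load some GGUF files when the shard is named explicitly via the
# gguf_file argument. The filename below is a placeholder, not a real listing
# from the repo.
#
# from transformers import AutoModelForCausalLM
# model_2 = AutoModelForCausalLM.from_pretrained(
#     "mradermacher/BashCopilot-6B-preview-GGUF",
#     gguf_file="<shard-name>.gguf",  # placeholder filename
# )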

# Load dataset using Dask
def load_data():
    # Example of loading a dataset split straight from the Hugging Face Hub
    # via an hf:// path (adjust paths as necessary).
    splits = {'creative_content': 'data/creative_content-00000-of-00001.parquet'}
    df = dd.read_parquet("hf://datasets/microsoft/orca-agentinstruct-1M-v1/" + splits["creative_content"])
    return df.head()
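
# Hedged alternative (assumption): the same split could be fetched with the
# datasets library instead of Dask; the split name is taken from the dict
# above and the helper name is illustrative, not from the original file.
def load_data_with_datasets():
    from datasets import load_dataset
    ds = load_dataset("microsoft/orca-agentinstruct-1M-v1", split="creative_content")
    # Return the first few rows as a quick preview.
    return ds[:5]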

# Function for model inference
def infer_model(input_text, model_type):
    # Choose the model based on the dropdown value; `models` is populated by
    # build_interface() as [model_1, model_2, tokenizer, model_3].
    if model_type == 'RedTeamAI':
        model = models[0]
    elif model_type == 'BashCopilot':
        model = models[1]
    elif model_type == 'BertModel':
        model = models[3]
        tokenizer = models[2]
        inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True)
        with torch.no_grad():
            outputs = model(**inputs)
        return outputs.logits.argmax(dim=-1).item()
    else:
        return "Model type not recognized."

    # For the generative models you would call model.generate() or another
    # inference method appropriate to the checkpoint (see sketch below).
    return f"Model {model_type} inference not implemented yet."

# Gradio interface setup
def build_interface():
    # Load models and expose them to infer_model via a module-level list.
    global models
    model_1, model_2, tokenizer, model_3 = load_models()
    models = [model_1, model_2, tokenizer, model_3]

    # Load the dataset preview (example function; extend as needed).
    data_preview = load_data()
    print(f"Dataset preview: {data_preview}")

    # Create the Gradio interface
    with gr.Blocks() as demo:
        gr.Markdown("# Chagrin AI - Model Inference & Dataset Explorer")

        # Model selection dropdown
        model_type = gr.Dropdown(choices=["RedTeamAI", "BashCopilot", "BertModel"], label="Choose Model")
        # Textbox for user input
        input_text = gr.Textbox(label="Enter your input text")
        # Output textbox and button to trigger inference
        result = gr.Textbox(label="Inference Result")
        submit_btn = gr.Button("Run Inference")
        submit_btn.click(infer_model, inputs=[input_text, model_type], outputs=result)

    demo.launch()

# Run the app
if __name__ == "__main__":
    build_interface()
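
# Usage note (assumption about the environment): run `python app.py` and open
# the local URL Gradio prints; passing share=True to demo.launch() would
# expose a temporary public link.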