# bug_bot / app.py
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import pandas as pd
from datasets import load_dataset

# Load the model and tokenizer from the Hugging Face Hub
model_path = "Canstralian/pentest_ai"  # Replace with your model path if needed
device = "cuda" if torch.cuda.is_available() else "cpu"
model = AutoModelForCausalLM.from_pretrained(model_path).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_path)
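
# If the Space has a GPU, half precision roughly halves memory use. A sketch,
# assuming the checkpoint supports fp16 (not verified for this model):
#   model = AutoModelForCausalLM.from_pretrained(
#       model_path, torch_dtype=torch.float16
#   ).to(device)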

# Handle a user prompt and generate a response
def generate_text(instruction):
    # Tokenize the prompt, truncating long inputs to fit the context window
    inputs = tokenizer(instruction, return_tensors="pt", truncation=True, max_length=512).to(device)
    # max_new_tokens bounds the completion itself, so long prompts no longer
    # exhaust the budget (the old max_length=150 counted prompt tokens too)
    with torch.inference_mode():
        outputs = model.generate(**inputs, max_new_tokens=150, num_beams=5, do_sample=True,
                                 temperature=0.7, top_p=0.95, pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens, skipping the echoed prompt
    return tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True)
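
# Quick smoke test (hypothetical prompt) before wiring up the UI:
#   print(generate_text("Explain what an nmap SYN scan does."))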

# Load a small sample dataset (replaceable with any dataset on the Hub)
def load_sample_data():
    # Pull just the first five training rows from Hugging Face Datasets
    dataset = load_dataset("imdb", split="train[:5]")
    # Convert to a DataFrame for Gradio's dataframe output; the split already
    # holds only five rows, so no extra .head() call is needed
    return pd.DataFrame(dataset)
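
# Any other Hub dataset slices the same way, e.g. (an arbitrary example):
#   dataset = load_dataset("squad", split="train[:5]")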

# Gradio interface for the text generation function (no live mode:
# rerunning generation on every keystroke would be far too expensive)
iface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter your question or prompt here..."),
    outputs="text",
    title="Pentest AI Text Generator",
    description="Generate text using a fine-tuned model for pentesting-related queries.",
)

# Gradio interface for viewing the sample dataset (optional)
data_viewer = gr.Interface(fn=load_sample_data, inputs=[], outputs="dataframe", title="Sample Dataset Viewer")

# Serve both interfaces from one app: a second launch() would never run,
# since the first call blocks the script (and a Space serves a single app)
gr.TabbedInterface([iface, data_viewer], ["Generator", "Dataset Viewer"]).launch()
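
# Optional hardening for a busy public Space: queue requests so long
# generations don't time out (a sketch of Gradio's standard pattern):
#   app = gr.TabbedInterface([iface, data_viewer], ["Generator", "Dataset Viewer"])
#   app.queue().launch()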