Flux1Dev / app.py
import torch
import gradio as gr
from diffusers import FluxPipeline

# FLUX.1-schnell is a text-to-image diffusion model, so it is loaded with
# diffusers' FluxPipeline rather than a transformers text-generation pipeline.
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16)
pipe.to("cuda" if torch.cuda.is_available() else "cpu")
# Generate an image from a prompt, with the seed and step count exposed as parameters
def generate_image(prompt, seed, num_inference_steps):
    # Fix the random seed for reproducibility
    generator = torch.Generator("cpu").manual_seed(int(seed))
    # Run the diffusion pipeline; schnell is guidance-distilled, so guidance is disabled
    image = pipe(
        prompt,
        num_inference_steps=int(num_inference_steps),
        guidance_scale=0.0,
        max_sequence_length=256,
        generator=generator,
    ).images[0]
    return image
# Create a Gradio interface
interface = gr.Interface(
    fn=generate_image,
    inputs=[
        gr.Textbox(label="Input Prompt"),
        gr.Number(label="Seed", value=0, precision=0),
        gr.Number(label="Inference Steps", value=4, precision=0),  # schnell is distilled for ~4 steps
    ],
    outputs=gr.Image(label="Generated Image")
)
# Launch the interface
interface.launch()
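# A minimal sketch of calling the deployed interface remotely with gradio_client.
# The Space id "Akbartus/Flux1Dev" is an assumption based on the repo name;
# substitute the actual Space id. For an Image output, the returned value is a
# local file path to the downloaded result.
#
#   from gradio_client import Client
#   client = Client("Akbartus/Flux1Dev")  # assumed Space id
#   result = client.predict("an astronaut riding a horse", 0, 4, api_name="/predict")
#   print(result)  # path to the generated image file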