polymers / app.py
rish13's picture
Update app.py
0eed6d3 verified
raw
history blame
1.21 kB
import gradio as gr
from transformers import pipeline
import torch
# Select GPU device index 0 when CUDA is available; -1 tells the
# transformers pipeline to run on CPU.
if torch.cuda.is_available():
    device = 0
else:
    device = -1

# Text-generation pipeline backed by the fine-tuned polymer model.
model = pipeline(
    "text-generation",
    model="rish13/polymers",
    device=device,
)
def generate_response(prompt: str) -> str:
    """Generate a short text continuation of *prompt* with the polymer model.

    Args:
        prompt: Free-form text used as the generation seed.

    Returns:
        The generated text (which includes the prompt, as returned by the
        transformers text-generation pipeline).
    """
    response = model(
        prompt,
        max_length=50,            # Keep outputs short
        num_return_sequences=1,
        do_sample=True,           # Required: without this the pipeline uses
                                  # greedy decoding and temperature/top_k/top_p
                                  # below are silently ignored
        temperature=0.5,          # Lowered to reduce randomness
        top_k=50,                 # Limit next-token candidates to the top 50
        top_p=0.9,                # Nucleus sampling cumulative-probability cutoff
    )
    # The pipeline returns a list of dicts; take the single sequence's text.
    generated_text = response[0]['generated_text']
    return generated_text
# Gradio UI: a two-line prompt textbox in, the generated text out.
prompt_box = gr.Textbox(
    lines=2,
    placeholder="Enter your prompt here...",
    label="Prompt",
)

interface = gr.Interface(
    fn=generate_response,
    inputs=prompt_box,
    outputs="text",
    title="Polymer Knowledge Model",
    description="A model fine-tuned for generating text related to polymers.",
)

# Start the web app (blocks until the server is stopped).
interface.launch()