import gradio as gr
from transformers import pipeline
import requests

model_id = "gpt2"  # You can replace this with any model of your choice
def fetch_s3_text_file(url):
    try:
        response = requests.get(url)
        response.raise_for_status()  # Raise an HTTPError for bad responses (4xx and 5xx)
        return response.text
    except requests.exceptions.RequestException as e:
        print(f"Error fetching the file: {e}")
        return None
# access_token = fetch_s3_text_file("https://mybookbooks.s3.amazonaws.com/key.txt")
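# A minimal sketch of how the fetched token could be used for a gated model,
# assuming key.txt holds a valid Hugging Face token and a recent transformers
# version that accepts the `token` argument (hypothetical, not enabled here):
# generator = pipeline("text-generation", model=model_id, token=access_token)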
generator = pipeline("text-generation", model=model_id)
# Define the function to process the input and generate text
def generate_text(prompt):
    response = generator(prompt, max_length=100, num_return_sequences=1)
    generated_text = response[0]['generated_text']
    return generated_text
demo = gr.Interface(fn=generate_text, inputs="text", outputs="text")
demo.launch()
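# Optional: when running locally rather than on Spaces, a temporary public
# link can be created by launching with demo.launch(share=True).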