import gradio as gr
from transformers import pipeline, AutoConfig
import logging

# Temporary workaround: extend the standard Logger class with a warning_once
# method (emits each distinct message only once), which some model code expects
def warning_once(self, msg, *args, **kwargs):
    if msg not in self._warned:
        self.warning(msg, *args, **kwargs)
        self._warned.add(msg)

logging.Logger.warning_once = warning_once
logging.Logger._warned = set()  # set of already-emitted messages, shared by all loggers

# Load the model configuration with trust_remote_code so the model's custom
# configuration code can be executed
config = AutoConfig.from_pretrained('tiiuae/falcon-40b-instruct', trust_remote_code=True)

# Load the model once when the script starts, using the loaded config;
# trust_remote_code is needed here as well so the pipeline can load the model's custom code
generator = pipeline('text-generation', model='tiiuae/falcon-40b-instruct', config=config, trust_remote_code=True)
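
# Note: falcon-40b-instruct is a very large model. If the default load runs out of
# memory, one option (a sketch, assuming the `accelerate` package is installed) is
# to shard the weights across the available devices, e.g.:
#   generator = pipeline('text-generation', model='tiiuae/falcon-40b-instruct',
#                        config=config, trust_remote_code=True, device_map='auto')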

def generate_text(prompt):
    # Use the preloaded pipeline; max_length caps the combined prompt + generated length
    return generator(prompt, max_length=100)[0]['generated_text']

def main():
    with gr.Blocks() as demo:
        gr.Markdown("## Text Generation Model")
        gr.Markdown("This model generates text based on the input prompt. Powered by Hugging Face transformers.")
        prompt = gr.Textbox(lines=2, placeholder="Type your prompt here...")
        output = gr.Text(label="Generated Text")
        # Regenerate the output whenever the prompt textbox changes
        prompt.change(fn=generate_text, inputs=prompt, outputs=output)
    demo.launch()

if __name__ == "__main__":
    main()
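
# A minimal sketch of how one might run this Space locally, assuming the script is
# saved as app.py and enough GPU memory is available for falcon-40b-instruct:
#   pip install gradio transformers torch
#   python app.py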