import gradio as gr
# Use a pipeline as a high-level helper
from transformers import pipeline

# Load the model once at startup rather than on every request.
pipe = pipeline("text-generation", model="meta-llama/Llama-3.1-8B-Instruct")


def llama3_1_8B(question):
    # Wrap the user question in the chat-message format expected by the pipeline.
    messages = [
        {"role": "user", "content": question},
    ]
    responses = pipe(messages)
    # Return only the assistant's reply text instead of the raw response object.
    return responses[0]["generated_text"][-1]["content"]


def greet(name):
    # Unused helper; not wired into the interface below.
    return "Hello " + name + "!!???"


demo = gr.Interface(fn=llama3_1_8B, inputs="text", outputs="text")
demo.launch()