# tinyllama-Demo / app.py
import gradio as gr

# Load the model from Hugging Face
def load_model():
    try:
        model_interface = gr.load("huggingface/TinyLlama/TinyLlama-1.1B-Chat-v1.0")
        return model_interface
    except ValueError as e:
        print(f"Error loading the model: {e}")
        return None

# Create a Gradio interface with custom title and logo
def main():
    model_interface = load_model()
    if model_interface is None:
        print("Failed to load the model. Exiting.")
        return

    with gr.Blocks() as demo:
        # Add the title
        gr.Markdown("# TinyLlama Demo")
        # Add the logo
        gr.Image("orYx logo.png", elem_id="logo", show_label=False, interactive=False)
        # Embed the model interface inside a Row
        with gr.Row():
            model_interface.render()  # Use render to include the prebuilt interface

    demo.launch()

if __name__ == "__main__":
    main()
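
# A minimal alternative sketch (not part of the original app): gr.load also accepts
# an explicit `src` argument, so the same hosted-model interface could be created
# and launched directly, assuming the model is served by the Hugging Face
# Inference API:
#
#     demo = gr.load(name="TinyLlama/TinyLlama-1.1B-Chat-v1.0", src="models")
#     demo.launch()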