import streamlit as st
from transformers import pipeline

# Ensure the latest Transformers library is used
st.write("Initializing model...")

try:
    pipe = pipeline(
        "image-text-to-text",
        model="deepseek-ai/deepseek-vl2-small",
        trust_remote_code=True,
    )
except ValueError as e:
    st.error(f"Model loading failed: {e}")
    st.stop()

st.title("DeepSeek-VL2 Chatbot")

# User input
user_input = st.text_input("Enter your message:", "")

if st.button("Generate Response"):
    if user_input:
        messages = [{"role": "user", "content": user_input}]
        result = pipe(messages)
        response = result[0]["generated_text"] if isinstance(result, list) else result
        st.text_area("Model Response:", response, height=200)
    else:
        st.warning("Please enter a message.")
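
# Usage note (a sketch, not part of the original script): save this file,
# e.g. as app.py (the filename is an assumption), and launch the UI with
# Streamlit's CLI:
#   streamlit run app.py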