import streamlit as st
from clarifai.client.model import Model


# Function to get a prediction from the Codestral model hosted on Clarifai
def get_model_prediction(prompt):
    model_url = "https://clarifai.com/mistralai/completion/models/codestral-22b-instruct"
    pat = "CodestralPat"  # replace with your Clarifai Personal Access Token
    model = Model(url=model_url, pat=pat)
    model_prediction = model.predict_by_bytes(prompt.encode(), input_type="text")
    return model_prediction.outputs[0].data.text.raw


# Streamlit interface
st.title("Codestral on Clarifai")
st.write("Enter a prompt and get a prediction from the Clarifai model.")

prompt = st.text_input("Enter your prompt:", "What's the future of AI?")

if st.button("Get Prediction"):
    prediction = get_model_prediction(prompt)
    st.write("Model Prediction:")
    st.write(prediction)