# pages/3_Codestral.py
# NOTE: source was copied from a Hugging Face file view; original page header
# read "Create pages/3_Codestral.py — commit 1149e68 (verified), 772 Bytes".
import os
import streamlit as st
from clarifai.client.model import Model
# Read the Clarifai personal access token (PAT) from the environment.
# NOTE(review): if "CodestralPat" is unset this is None, and the Clarifai
# Model call below will fail at request time — confirm the deployment sets it.
api_key = os.getenv("CodestralPat")
# Function to get prediction from the model
def get_model_prediction(prompt):
    """Send *prompt* to the Codestral model on Clarifai and return its reply.

    Args:
        prompt: User prompt text to send to the model.

    Returns:
        The raw text of the first output returned by the model.

    Raises:
        RuntimeError: If the ``CodestralPat`` environment variable is unset,
            so the failure is explained up front instead of surfacing as an
            opaque authentication error inside the Clarifai client.
    """
    if not api_key:
        raise RuntimeError(
            "Missing Clarifai PAT: set the 'CodestralPat' environment variable."
        )
    model_url = "https://clarifai.com/mistralai/completion/models/codestral-22b-instruct"
    model = Model(url=model_url, pat=api_key)
    # predict_by_bytes expects raw bytes, hence the encode() on the prompt.
    model_prediction = model.predict_by_bytes(prompt.encode(), input_type="text")
    return model_prediction.outputs[0].data.text.raw
# Streamlit interface: collect a prompt and show the model's reply.
st.title("Codestral with Clarifai")
prompt = st.text_input("Enter your prompt:", "What's the future of AI?")
if st.button("Get Prediction"):
    if not prompt.strip():
        # Don't waste an API call on an empty/whitespace prompt.
        st.warning("Please enter a prompt first.")
    else:
        # Show progress while the remote call runs; surface failures
        # (missing PAT, network/auth errors) in the UI instead of letting
        # the page crash with a raw traceback.
        with st.spinner("Querying Codestral..."):
            try:
                prediction = get_model_prediction(prompt)
            except Exception as exc:
                st.error(f"Prediction failed: {exc}")
            else:
                st.write("Model Prediction:")
                st.write(prediction)