import os

import streamlit as st
from clarifai.client.model import Model

# Clarifai Personal Access Token (PAT), read from the environment.
api_key = os.getenv("CodestralPat")
|

def get_model_prediction(prompt):
    """Call the Codestral model hosted on Clarifai and return its text output."""
    model_url = "https://clarifai.com/mistralai/completion/models/codestral-22b-instruct"
    model = Model(url=model_url, pat=api_key)
    # The prompt is sent as raw bytes; input_type tells Clarifai it is plain text.
    model_prediction = model.predict_by_bytes(prompt.encode(), input_type="text")
    return model_prediction.outputs[0].data.text.raw


# Streamlit UI: collect a prompt and display the model's response.
st.title("Codestral with Clarifai")

prompt = st.text_input("Enter your prompt:", "What's the future of AI?")

if st.button("Get Prediction"):
    prediction = get_model_prediction(prompt)
    st.write("Model Prediction:")
    st.write(prediction)
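
# A minimal usage sketch, assuming the script is saved as app.py (hypothetical
# filename) and that a valid Clarifai PAT is exported as CodestralPat:
#
#   export CodestralPat="YOUR_CLARIFAI_PAT"
#   streamlit run app.py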
|
|