Update app.py
Browse files
app.py
CHANGED
@@ -1,16 +1,19 @@
|
|
|
|
1 |
import streamlit as st
|
2 |
from clarifai.client.model import Model
|
3 |
|
|
|
|
|
|
|
4 |
# Function to get prediction from the model
|
5 |
def get_model_prediction(prompt):
|
6 |
model_url = "https://clarifai.com/mistralai/completion/models/codestral-22b-instruct"
|
7 |
-
|
8 |
-
model = Model(url=model_url, pat=pat)
|
9 |
model_prediction = model.predict_by_bytes(prompt.encode(), input_type="text")
|
10 |
return model_prediction.outputs[0].data.text.raw
|
11 |
|
12 |
# Streamlit interface
|
13 |
-
st.title("
|
14 |
st.write("Enter a prompt and get a prediction from the Clarifai model.")
|
15 |
|
16 |
prompt = st.text_input("Enter your prompt:", "What's the future of AI?")
|
|
|
1 |
+
import os
|
2 |
import streamlit as st
|
3 |
from clarifai.client.model import Model
|
4 |
|
5 |
+
# Clarifai personal access token, supplied through the environment so the
# credential never lives in source control.
api_key = os.environ.get("CLARIFAI_PAT")
8 |
# Function to get prediction from the model
def get_model_prediction(prompt, model_url="https://clarifai.com/mistralai/completion/models/codestral-22b-instruct"):
    """Return the raw text completion for *prompt* from a Clarifai model.

    Args:
        prompt: Text prompt sent to the model (encoded as UTF-8 bytes).
        model_url: Clarifai model URL; defaults to Codestral-22B-Instruct,
            matching the original hard-coded model.

    Returns:
        The raw text of the first output in the model's prediction.

    Raises:
        ValueError: If the CLARIFAI_PAT environment variable was not set,
            so callers get a clear message instead of an opaque API auth
            failure deep inside the SDK.
    """
    # Fail fast on a missing credential (api_key is the module-level value
    # read from CLARIFAI_PAT; None/empty means it was never configured).
    if not api_key:
        raise ValueError("CLARIFAI_PAT environment variable is not set")
    model = Model(url=model_url, pat=api_key)
    model_prediction = model.predict_by_bytes(prompt.encode(), input_type="text")
    # The SDK returns a list of outputs; this app only uses the first one.
    return model_prediction.outputs[0].data.text.raw
|
14 |
|
15 |
# Streamlit interface: page heading, a short usage hint, and the prompt box.
default_prompt = "What's the future of AI?"

st.title("AI Model Prediction with Clarifai")
st.write("Enter a prompt and get a prediction from the Clarifai model.")

prompt = st.text_input("Enter your prompt:", default_prompt)