idk_test / app.py
import streamlit as st
from tensorflow import keras
import numpy as np
from huggingface_hub import HfFileSystem
from PIL import Image
# Download the custom model from the Hugging Face Hub (a locally saved token is
# used automatically if the repo is private). HfFileSystem paths for model
# repos take the form '<user>/<repo>/<filename>'.
fs = HfFileSystem()
model_path = 'dhhd255/main_model/best_model.h5'
with fs.open(model_path, 'rb') as f:
    model_content = f.read()

# Save the model file to disk so Keras can load it by path
with open('best_model.h5', 'wb') as f:
    f.write(model_content)
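
# A shorter alternative (a sketch, assuming the repo is public): hf_hub_download
# fetches the file and caches it locally, so the manual read/write above could
# be replaced with:
#   from huggingface_hub import hf_hub_download
#   model_file = hf_hub_download(repo_id='dhhd255/main_model', filename='best_model.h5')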
# Load your custom model
model = keras.models.load_model('best_model.h5')
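
# Note: Streamlit re-executes this script on every interaction, so the download
# and load above run on every rerun. One way to avoid that (a sketch, assuming
# Streamlit >= 1.18, which provides st.cache_resource):
#   @st.cache_resource
#   def get_model():
#       return keras.models.load_model('best_model.h5')
#   model = get_model()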
# Run inference on an input image with the custom model
def image_classifier(image):
    # Preprocess: grayscale, resize to the model's input size, scale to [0, 1]
    image = Image.fromarray(image)
    image = image.convert('L')
    image = image.resize((128, 128))
    image = np.array(image)
    image = image / 255.0
    # Add batch and channel axes: (128, 128) -> (1, 128, 128, 1)
    image = np.expand_dims(image, axis=0)
    image = np.expand_dims(image, axis=-1)

    # Use the custom model for inference
    predictions = model.predict(image)

    # Map the highest-probability index to a class label
    predicted_index = np.argmax(predictions[0])
    labels = ['Healthy', 'Parkinson']
    predicted_label = labels[predicted_index]

    return predictions[0], predicted_label
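
# Quick sanity check (hypothetical): a blank 128x128 grayscale frame should
# pass through the full preprocessing and predict path without errors:
#   probs, label = image_classifier(np.zeros((128, 128), dtype=np.uint8))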
# Streamlit UI: image upload input
uploaded_file = st.file_uploader('Upload an image')
if uploaded_file is not None:
    # Convert the UploadedFile object to a NumPy array
    image = Image.open(uploaded_file)
    image = np.array(image)

    # Run inference and display the result
    predictions, predicted_label = image_classifier(image)
    st.write(f'Predictions: {predictions}')
    st.write(f'Predicted label: {predicted_label}')
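
# To run locally (assuming streamlit, tensorflow, numpy, pillow and
# huggingface_hub are installed):
#   streamlit run app.py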