# SneakerAI / app.py
from flask import Flask, request, render_template, jsonify
from huggingface_hub import hf_hub_download
import cv2
import numpy as np
import tensorflow as tf
from tensorflow import keras
import pandas as pd
import base64
import json

# Initialize Flask app
app = Flask(__name__)

# Load model and data at startup
model = keras.models.load_model("./sneaker_category_predictor_v2.keras")
# model = tf.keras.models.load_model("./sneaker_category_predictor_v2.keras")
# model_path = hf_hub_download(repo_id="thejagstudio/SneakerAI", filename="sneaker_category_predictor_v2.h5", repo_type="model")
# model = tf.keras.models.load_model(model_path)
# model = load_model(model_path)

# Define expected columns for one-hot encoding
with open("metadata.json", "r") as f:
    METADATA_COLUMNS = json.load(f)
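# metadata.json is expected to map each metadata feature to its list of allowed
# (lower-cased) values, e.g. (illustrative assumption, not the actual file contents):
# {
#     "brand": ["nike", "adidas", ...],
#     "color": ["black", "white", ...],
#     "gender": [...], "midsole": [...], "upperMaterial": [...]
# }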


def encode_metadata(data):
    # Create a single-row DataFrame from the submitted metadata (lower-cased)
    df = pd.DataFrame({k: [v.lower()] for k, v in data.items()})
    # Initialize empty DataFrame with all possible columns
    encoded = pd.DataFrame()
    # Encode each feature, maintaining a consistent set of columns
    for feature, possible_values in METADATA_COLUMNS.items():
        feature_encoded = pd.get_dummies(df[feature], prefix=feature)
        # Add missing columns with 0s
        for value in possible_values:
            col_name = f"{feature}_{value}"
            if col_name not in feature_encoded.columns:
                feature_encoded[col_name] = 0
        encoded = pd.concat([encoded, feature_encoded], axis=1)
    # Ensure consistent column order
    all_columns = [
        f"{feat}_{val}" for feat, vals in METADATA_COLUMNS.items() for val in vals
    ]
    encoded = encoded.reindex(columns=all_columns, fill_value=0)
    return encoded.values.astype(np.float32)
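
# Example (hypothetical values; assumes these keys and values exist in metadata.json):
#   encode_metadata({"brand": "Nike", "color": "Black", "gender": "men",
#                    "midsole": "foam", "upperMaterial": "mesh"})
#   -> float32 array of shape (1, total_one_hot_columns), in the column order
#      derived from METADATA_COLUMNS.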
@app.route("/")
def index():
global METADATA_COLUMNS
return render_template("index.html", metadata=METADATA_COLUMNS)
@app.route("/predict", methods=["POST"])
def predict():
try:
data = request.json
# Process image
img_data = base64.b64decode(data["image"])
img_array = np.frombuffer(img_data, np.uint8)
img = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
img = cv2.resize(img, (224, 224))
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img = img / 255.0
img = np.expand_dims(img, axis=0)
# Encode metadata with consistent columns
metadata = encode_metadata(
{
"brand": data["brand"],
"color": data["color"],
"gender": data["gender"],
"midsole": data["midsole"],
"upperMaterial": data["upperMaterial"],
}
)
# Make prediction
predictions = model.predict([img, metadata])
categories = [
"Lifestyle",
"Running",
"Other",
"Cleat",
"Sandal",
"Basketball",
"Boot",
"Skateboarding",
]
confidenceList = predictions[0].tolist()
return jsonify({"categories": categories, "confidence": confidenceList})
except Exception as e:
return jsonify({"error": str(e)}), 400


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)
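
# Example client call (a minimal sketch, assuming the app is running locally on
# port 7860 and that "sneaker.jpg" exists; field names mirror the /predict route,
# the specific values are illustrative):
#
#   import base64, requests
#
#   with open("sneaker.jpg", "rb") as fh:
#       payload = {
#           "image": base64.b64encode(fh.read()).decode("utf-8"),
#           "brand": "Nike",
#           "color": "Black",
#           "gender": "men",
#           "midsole": "foam",
#           "upperMaterial": "mesh",
#       }
#   resp = requests.post("http://localhost:7860/predict", json=payload)
#   print(resp.json())  # {"categories": [...], "confidence": [...]}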