from flask import Flask, request, render_template, jsonify, send_from_directory
import os
import torch
import numpy as np
import cv2
from segment_anything import sam_model_registry, SamPredictor
from werkzeug.utils import secure_filename
import warnings


app = Flask(
    __name__,
    template_folder='templates',
    static_folder='static'
)

# Uploaded images are written under static/ and served back by the
# /uploads/<filename> route below.
app.config['UPLOAD_FOLDER'] = os.path.join('static', 'uploads')
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)


# SAM configuration: ViT-B checkpoint, run on GPU when available
MODEL_TYPE = "vit_b"
MODEL_PATH = os.path.join('models', 'sam_vit_b_01ec64.pth')
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

print("Loading SAM model...")
try:
    # Recent PyTorch versions accept weights_only=True for safer checkpoint loading
    state_dict = torch.load(MODEL_PATH, map_location="cpu", weights_only=True)
except TypeError:
    # Older PyTorch versions do not know the weights_only argument; fall back
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=UserWarning)
        state_dict = torch.load(MODEL_PATH, map_location="cpu")

# Build the ViT-B architecture, load the weights, and wrap it in a predictor
sam = sam_model_registry[MODEL_TYPE]()
sam.load_state_dict(state_dict, strict=False)
sam.to(device=device)
predictor = SamPredictor(sam)
print("SAM model loaded successfully!")


@app.route('/', methods=['GET', 'POST'])
def index():
    if request.method == 'POST':
        if 'image' not in request.files:
            return "No file selected", 400
        file = request.files['image']
        if file.filename == '':
            return "Empty filename", 400
        # Store the upload and re-render the page with the image displayed
        filename = secure_filename(file.filename)
        filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
        file.save(filepath)

        return render_template('index.html', uploaded_image=filename)
    return render_template('index.html')


@app.route('/uploads/<filename>')
def uploaded_file(filename):
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
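

# ---------------------------------------------------------------------------
# Sketch of a point-prompt segmentation endpoint. No such route exists in the
# code above, yet jsonify, numpy, cv2 and the SamPredictor are otherwise
# unused, so a minimal example of how the predictor could be wired up is
# given here. The route name and the JSON fields ('filename', 'x', 'y') are
# assumptions for illustration, not part of the original application.
# ---------------------------------------------------------------------------
@app.route('/segment', methods=['POST'])
def segment():
    data = request.get_json()
    filepath = os.path.join(app.config['UPLOAD_FOLDER'],
                            secure_filename(data['filename']))
    img_bgr = cv2.imread(filepath)
    if img_bgr is None:
        return jsonify({'error': 'image not found'}), 404

    # SamPredictor expects an RGB uint8 array of shape (H, W, 3)
    predictor.set_image(cv2.cvtColor(img_bgr, cv2.COLOR_BGR2RGB))
    masks, scores, _ = predictor.predict(
        point_coords=np.array([[data['x'], data['y']]]),
        point_labels=np.array([1]),  # 1 marks a foreground click
        multimask_output=True,
    )
    best = masks[np.argmax(scores)]  # keep the highest-scoring candidate mask
    return jsonify({'mask': best.astype(np.uint8).tolist(),
                    'score': float(scores.max())})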


if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=5000)