#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import numpy as np
import gradio as gr
import roop.globals
from roop.core import (
start,
decode_execution_providers,
suggest_max_memory,
suggest_execution_threads,
)
from roop.processors.frame.core import get_frame_processors_modules
from roop.utilities import normalize_output_path
import os
from PIL import Image
from datetime import datetime
from huggingface_hub import HfApi, login
import json
import shutil
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
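# Note: this assumes the environment (or a local .env file) provides an
# hf_token entry, e.g. a line of the form hf_token=hf_xxxxxxxx.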
class FaceIntegrDataset:
def __init__(self, repo_id="Arrcttacsrks/face_integrData"):
        # Get token from the hf_token environment variable (loaded from .env above)
        self.token = os.getenv('hf_token')
        if not self.token:
            raise ValueError("hf_token environment variable is not set")
self.repo_id = repo_id
self.api = HfApi()
# Login to Hugging Face
login(self.token)
# Create local temp directory for organizing files
self.temp_dir = "temp_dataset"
os.makedirs(self.temp_dir, exist_ok=True)
def create_date_folder(self):
"""Create folder structure based on current date"""
current_date = datetime.now().strftime("%Y-%m-%d")
folder_path = os.path.join(self.temp_dir, current_date)
os.makedirs(folder_path, exist_ok=True)
return folder_path, current_date
def save_metadata(self, source_path, target_path, output_path, timestamp):
"""Save metadata for the face swap operation"""
metadata = {
"timestamp": timestamp,
"source_image": source_path,
"target_image": target_path,
"output_image": output_path,
"date_created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
}
return metadata
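    # Callers pass bare filenames rather than absolute paths, so the metadata
    # stays valid relative to the date folder once it is uploaded to the repo.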
def upload_to_hf(self, local_folder, date_folder):
"""Upload files to Hugging Face dataset"""
try:
# Upload the files
self.api.upload_folder(
folder_path=local_folder,
repo_id=self.repo_id,
repo_type="dataset",
path_in_repo=date_folder
)
return True
except Exception as e:
print(f"Error uploading to Hugging Face: {str(e)}")
return False
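# Minimal (hypothetical) standalone usage sketch for FaceIntegrDataset,
# assuming .env provides hf_token and the dataset repo is writable:
#
#   handler = FaceIntegrDataset()
#   folder_path, date_folder = handler.create_date_folder()
#   ...save images and metadata into folder_path...
#   handler.upload_to_hf(folder_path, date_folder)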
def swap_face(source_file, target_file, doFaceEnhancer):
try:
# Initialize dataset handler
dataset_handler = FaceIntegrDataset()
# Create date-based folder
folder_path, date_folder = dataset_handler.create_date_folder()
# Generate timestamp for unique identification
timestamp = datetime.now().strftime("%S%M%H%d%m%Y")
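        # Note: the pattern is second-minute-hour-day-month-year, so names are
        # unique per second but do not sort chronologically.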
# Save input images with timestamp in folder
source_path = os.path.join(folder_path, f"source_{timestamp}.jpg")
target_path = os.path.join(folder_path, f"target_{timestamp}.jpg")
output_path = os.path.join(folder_path, f"Image{timestamp}.jpg")
# Save the input images
source_image = Image.fromarray(source_file)
source_image.save(source_path)
target_image = Image.fromarray(target_file)
target_image.save(target_path)
print("source_path: ", source_path)
print("target_path: ", target_path)
# Set global paths
roop.globals.source_path = source_path
roop.globals.target_path = target_path
roop.globals.output_path = normalize_output_path(
roop.globals.source_path,
roop.globals.target_path,
output_path
)
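        # In roop, normalize_output_path keeps the given file path as-is and
        # only derives a source-target filename when the output is a directory.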
# Configure face processing options
if doFaceEnhancer:
roop.globals.frame_processors = ["face_swapper", "face_enhancer"]
else:
roop.globals.frame_processors = ["face_swapper"]
# Set global parameters
roop.globals.headless = True
roop.globals.keep_fps = True
roop.globals.keep_audio = True
roop.globals.keep_frames = False
roop.globals.many_faces = False
roop.globals.video_encoder = "libx264"
roop.globals.video_quality = 18
roop.globals.max_memory = suggest_max_memory()
roop.globals.execution_providers = decode_execution_providers(["cuda"])
roop.globals.execution_threads = suggest_execution_threads()
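        # The fps/audio/encoder settings above only matter when the target is a
        # video; they should have no effect on the still images this app handles.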
print(
"start process",
roop.globals.source_path,
roop.globals.target_path,
roop.globals.output_path,
)
        # Check frame processors; fail loudly instead of silently returning
        # None when a required model is missing
        for frame_processor in get_frame_processors_modules(roop.globals.frame_processors):
            if not frame_processor.pre_check():
                raise RuntimeError("Frame processor pre-check failed; required models may be missing")
# Process the face swap
start()
# Save metadata
metadata = dataset_handler.save_metadata(
f"source_{timestamp}.jpg",
f"target_{timestamp}.jpg",
f"Image{timestamp}.jpg",
timestamp
)
# Save metadata to JSON file in the same folder
metadata_path = os.path.join(folder_path, f"metadata_{timestamp}.json")
with open(metadata_path, 'w') as f:
json.dump(metadata, f, indent=4)
# Upload to Hugging Face
upload_success = dataset_handler.upload_to_hf(folder_path, date_folder)
if upload_success:
print(f"Successfully uploaded files to dataset {dataset_handler.repo_id}")
else:
print("Failed to upload files to Hugging Face dataset")
        # Load the result into memory before deleting the temp folder,
        # otherwise the returned path would point at a removed file
        output_image = np.array(Image.open(output_path))
        shutil.rmtree(folder_path)
        return output_image
    except Exception as e:
        print(f"Error in face swap process: {str(e)}")
        raise gr.Error(f"Face swap failed: {str(e)}")
# Gradio interface setup
title = "Face - Интегратор"
description = r"""
The application will save the image history to Hugging Face dataset using the environment variable token.
"""
article = r"""
"""
app = gr.Interface(
fn=swap_face,
title=title,
description=description,
article=article,
    inputs=[
        gr.Image(label="Source face"),
        gr.Image(label="Target image"),
        gr.Checkbox(label="Apply the enhancement algorithm?", info="Improves output image quality")
    ],
outputs="image"
)
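# Launch the Gradio UI only when the script is run directly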
if __name__ == "__main__":
    app.launch()