#!/usr/bin/env python
# -*- coding: UTF-8 -*-
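"""Gradio face-swap app built on roop.

Takes a source and a target image, optionally runs a face enhancer, and
archives the inputs, output, and metadata to a Hugging Face dataset repo.
"""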
import numpy as np
import gradio as gr
import roop.globals
from roop.core import (
start,
decode_execution_providers,
suggest_max_memory,
suggest_execution_threads,
)
from roop.processors.frame.core import get_frame_processors_modules
from roop.utilities import normalize_output_path
import os
from PIL import Image
from datetime import datetime
from huggingface_hub import HfApi, login
from datasets import load_dataset, Dataset
import json
import shutil
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
class FaceIntegrDataset:
    def __init__(self, repo_id="Arrcttacsrks/face_integrData"):
        # Get token from environment variable
        self.token = os.getenv('hf_token')
        if not self.token:
            raise ValueError("hf_token environment variable is not set")
        self.repo_id = repo_id
        self.api = HfApi()
        # Login to Hugging Face
        login(self.token)
        # Create local temp directory for organizing files
        self.temp_dir = "temp_dataset"
        os.makedirs(self.temp_dir, exist_ok=True)
    def create_date_folder(self):
        """Create a folder structure based on the current date."""
        current_date = datetime.now().strftime("%Y-%m-%d")
        folder_path = os.path.join(self.temp_dir, current_date)
        os.makedirs(folder_path, exist_ok=True)
        return folder_path, current_date
    def save_metadata(self, source_path, target_path, output_path, timestamp):
        """Build and return a metadata record for one face-swap operation."""
        metadata = {
            "timestamp": timestamp,
            "source_image": source_path,
            "target_image": target_path,
            "output_image": output_path,
            "date_created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        }
        return metadata
    def upload_to_hf(self, local_folder, date_folder):
        """Upload a local folder to the Hugging Face dataset repo."""
        try:
            # Upload the files
            self.api.upload_folder(
                folder_path=local_folder,
                repo_id=self.repo_id,
                repo_type="dataset",
                path_in_repo=date_folder
            )
            return True
        except Exception as e:
            print(f"Error uploading to Hugging Face: {str(e)}")
            return False
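# Minimal usage sketch for FaceIntegrDataset (assumes the hf_token env var is
# set and the dataset repo exists; file names here are illustrative):
#
#   handler = FaceIntegrDataset()
#   folder, date = handler.create_date_folder()
#   # ... write source/target/output images into `folder` ...
#   handler.upload_to_hf(folder, date)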
def swap_face(source_file, target_file, doFaceEnhancer):
    folder_path = None
    try:
        # Initialize dataset handler
        dataset_handler = FaceIntegrDataset()
        # Create date-based folder
        folder_path, date_folder = dataset_handler.create_date_folder()
        # Generate timestamp for unique identification
        timestamp = datetime.now().strftime("%S%M%H%d%m%Y")
        # Build timestamped file paths inside the date folder
        source_path = os.path.join(folder_path, f"source_{timestamp}.jpg")
        target_path = os.path.join(folder_path, f"target_{timestamp}.jpg")
        output_path = os.path.join(folder_path, f"Image{timestamp}.jpg")
        # Save the input images
        if source_file is None or target_file is None:
            raise ValueError("Source and target images are required")
        source_image = Image.fromarray(source_file)
        source_image.save(source_path)
        target_image = Image.fromarray(target_file)
        target_image.save(target_path)
        print("source_path: ", source_path)
        print("target_path: ", target_path)
        # Set global paths
        roop.globals.source_path = source_path
        roop.globals.target_path = target_path
        roop.globals.output_path = normalize_output_path(
            roop.globals.source_path,
            roop.globals.target_path,
            output_path
        )
        # Configure face processing options
        if doFaceEnhancer:
            roop.globals.frame_processors = ["face_swapper", "face_enhancer"]
        else:
            roop.globals.frame_processors = ["face_swapper"]
        # Set global parameters
        roop.globals.headless = True
        roop.globals.keep_fps = True
        roop.globals.keep_audio = True
        roop.globals.keep_frames = False
        roop.globals.many_faces = False
        roop.globals.video_encoder = "libx264"
        roop.globals.video_quality = 18
        roop.globals.max_memory = suggest_max_memory()
        roop.globals.execution_providers = decode_execution_providers(["cuda"])
        roop.globals.execution_threads = suggest_execution_threads()
        print(
            "start process",
            roop.globals.source_path,
            roop.globals.target_path,
            roop.globals.output_path,
        )
        # Check that every frame processor passes its pre-run checks
        for frame_processor in get_frame_processors_modules(roop.globals.frame_processors):
            if not frame_processor.pre_check():
                return None
        # Process the face swap
        start()
        # Save metadata
        metadata = dataset_handler.save_metadata(
            f"source_{timestamp}.jpg",
            f"target_{timestamp}.jpg",
            f"Image{timestamp}.jpg",
            timestamp
        )
        # Save metadata to a JSON file in the same folder
        metadata_path = os.path.join(folder_path, f"metadata_{timestamp}.json")
        with open(metadata_path, 'w') as f:
            json.dump(metadata, f, indent=4)
        # Upload to Hugging Face
        upload_success = dataset_handler.upload_to_hf(folder_path, date_folder)
        if upload_success:
            print(f"Successfully uploaded files to dataset {dataset_handler.repo_id}")
        else:
            print("Failed to upload files to Hugging Face dataset")
        # Read the output image before cleaning up
        if os.path.exists(output_path):
            output_image = Image.open(output_path)
            output_array = np.array(output_image)
            # Clean up temp folder after reading the image
            shutil.rmtree(folder_path)
            return output_array
        else:
            print("Output image not found")
            if folder_path and os.path.exists(folder_path):
                shutil.rmtree(folder_path)
            return None
    except Exception as e:
        print(f"Error in face swap process: {str(e)}")
        if folder_path and os.path.exists(folder_path):
            shutil.rmtree(folder_path)
        raise gr.Error(f"Face swap failed: {str(e)}")
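# Minimal sketch of calling swap_face outside the UI (file names are
# illustrative). Gradio passes images as numpy arrays, so local files need to
# be converted with PIL first:
#
#   src = np.array(Image.open("source.jpg"))
#   tgt = np.array(Image.open("target.jpg"))
#   result = swap_face(src, tgt, doFaceEnhancer=False)  # numpy array or None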
def create_interface():
    # Create custom style
    custom_css = """
    .container {
        max-width: 1200px;
        margin: auto;
        padding: 20px;
    }
    .output-image {
        min-height: 400px;
        border: 1px solid #ccc;
        border-radius: 8px;
        padding: 10px;
    }
    """
    # Gradio interface setup
    title = "Face Integrator"
    description = r"""
    The application saves the image history to a Hugging Face dataset using the environment variable token.
    Please upload source and target images to begin the face swap process.
    """
    article = r"""
    <div style="text-align: center; max-width: 650px; margin: 40px auto;">
    <p>
    This tool performs face swapping with optional enhancement.
    The processed images are automatically saved to the Hugging Face dataset.
    </p>
    </div>
    """
    # Create Gradio interface with improved layout
    with gr.Blocks(title=title, css=custom_css) as app:
        gr.Markdown(f"<h1 style='text-align: center;'>{title}</h1>")
        gr.Markdown(description)
        with gr.Row():
            with gr.Column(scale=1):
                source_image = gr.Image(
                    label="Source Image",
                    type="numpy",
                    sources=["upload"]
                )
            with gr.Column(scale=1):
                target_image = gr.Image(
                    label="Target Image",
                    type="numpy",
                    sources=["upload"]
                )
            with gr.Column(scale=1):
                output_image = gr.Image(
                    label="Output Image",
                    type="numpy",
                    interactive=False,
                    elem_classes="output-image"
                )
        with gr.Row():
            enhance_checkbox = gr.Checkbox(
                label="Apply face enhancement?",
                info="Improves output image quality",
                value=False
            )
        with gr.Row():
            process_btn = gr.Button(
                "Process Face Swap",
                variant="primary",
                size="lg"
            )
        # Wire the button to the processing function
        process_btn.click(
            fn=swap_face,
            inputs=[source_image, target_image, enhance_checkbox],
            outputs=output_image,
            api_name="swap_face"
        )
        gr.Markdown(article)
    return app
def main():
    # Create and launch the interface
    app = create_interface()
    app.launch(share=False)
if __name__ == "__main__":
    main()
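# To run locally: `python app.py`, then open the printed local URL. On a hosted
# Space the app starts the same way; launching with share=True would instead
# create a temporary public gradio.live link, which is unnecessary on Spaces.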