diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..00db00150c9676cf1ab65280c94841bc7dc10285 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,40 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text +checkpoints/model.pth filter=lfs diff=lfs merge=lfs -text +checkpoints/efficientnet.onnx filter=lfs diff=lfs merge=lfs -textvideos/0317.mp4 filter=lfs diff=lfs merge=lfs -text +videos/celeb_synthesis.mp4 filter=lfs diff=lfs merge=lfs 
-text +images/lady.png filter=lfs diff=lfs merge=lfs -text +*.ext filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..05e01995d6197b3a725a4b6f72255ca0ef6d14a7 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +checkpoints/RawNet2.pth +deepfake/ +.gradio/ \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..45c7396e3284b6f292c6b3429c3d2df2ecda013a --- /dev/null +++ b/README.md @@ -0,0 +1,125 @@ +# DeepSecure-AI + +DeepSecure-AI is a powerful open-source tool designed to detect fake images, videos, and audios. Utilizing state-of-the-art deep learning techniques like EfficientNetV2 and MTCNN, DeepSecure-AI offers frame-by-frame video analysis, enabling high-accuracy deepfake detection. It's developed with a focus on ease of use, making it accessible for researchers, developers, and security analysts... + +--- + +## Features + +- Multimedia Detection: Detect deepfakes in images, videos, and audio files using a unified platform. +- High Accuracy: Leverages EfficientNetV2 for enhanced prediction performance and accurate results. +- Real-Time Video Analysis: Frame-by-frame analysis of videos with automatic face detection. +- User-Friendly Interface: Easy-to-use interface built with Gradio for uploading and processing media files. +- Open Source: Completely open source under the MIT license, making it available for developers to extend and improve. + +--- + +## Demo-Data + +You can test the deepfake detection capabilities of DeepSecure-AI by uploading your video files. The tool will analyze each frame of the video, detect faces, and determine the likelihood of the video being real or fake. + +Examples: +1. [Video1-fake-1-ff.mp4](#) +2. [Video6-real-1-ff.mp4](#) + +--- + +## How It Works + +DeepSecure-AI uses the following architecture: + +1. 
Face Detection: + The [MTCNN](https://arxiv.org/abs/1604.02878) model detects faces in each frame of the video. If no face is detected, it will use the previous frame's face to ensure accuracy. + +2. Fake vs. Real Classification: + Once the face is detected, it's resized and fed into the [EfficientNetV2](https://arxiv.org/abs/2104.00298) deep learning model, which determines the likelihood of the frame being real or fake. + +3. Fake Confidence: + A final prediction is generated as a percentage score, indicating the confidence that the media is fake. + +4. Results: + DeepSecure-AI provides an output video, highlighting the detected faces and a summary of whether the input is classified as real or fake. + +--- + +## Project Setup + +### Prerequisites + +Ensure you have the following installed: + +- Python 3.10 +- Gradio (pip install gradio) +- TensorFlow (pip install tensorflow) +- OpenCV (pip install opencv-python) +- PyTorch (pip install torch torchvision torchaudio) +- facenet-pytorch (pip install facenet-pytorch) +- MoviePy (pip install moviepy) + +### Installation + +1. Clone the repository: + git clone https://github.com/Divith123/DeepSecure-AI.git + cd DeepSecure-AI + + +2. Install required dependencies: + pip install -r requirements.txt + + +3. Download the pre-trained model weights for EfficientNetV2 and place them in the project folder. + +### Running the Application + +1. Launch the Gradio interface: + python app.py + + +2. The web interface will be available locally. You can upload a video, and DeepSecure-AI will analyze and display results. + +--- + +## Example Usage + +Upload a video or image to DeepSecure-AI to detect fake media. Here are some sample predictions: + +- Video Analysis: The tool will detect faces from each frame and classify whether the video is fake or real. +- Result Output: A GIF or MP4 file with the sequence of detected faces and classification result will be provided. 
+ +--- + +## Technologies Used + +- TensorFlow: For building and training deep learning models. +- EfficientNetV2: The core model for image and video classification. +- MTCNN: For face detection in images and videos. +- OpenCV: For video processing and frame manipulation. +- MoviePy: For video editing and result generation. +- Gradio: To create a user-friendly interface for interacting with the deepfake detector. + +--- + +## License + +This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. + +--- + +## Contributions + +Contributions are welcome! If you'd like to improve the tool, feel free to submit a pull request or raise an issue. + +For more information, check the [Contribution Guidelines](CONTRIBUTING.md). + +--- + +## References +- Li et al. (2020): [Celeb-DF(V2)](https://arxiv.org/abs/2008.06456) +- Rossler et al. (2019): [FaceForensics++](https://arxiv.org/abs/1901.08971) +- Timesler (2020): [Facial Recognition Model in PyTorch](https://www.kaggle.com/timesler/facial-recognition-model-in-pytorch) + +--- + +### Disclaimer + +DeepSecure-AI is a research project and is designed for educational purposes.Please use responsibly and always give proper credit when utilizing the model in your work. 
import gradio as gr
import inference_2 as inference

# Heading text shown above the tabbed UI.
title = " Multimodal Deepfake Detector"
description = "Detect deepfakes and AI-generated content from videos, audio, and images using advanced AI models."

# One gr.Interface per modality; each wraps a predictor from inference_2.
video_interface = gr.Interface(
    fn=inference.deepfakes_video_predict,
    inputs=gr.Video(label="Upload a Video"),
    outputs=gr.Textbox(label="Prediction"),
    examples=["videos/aaa.mp4", "videos/bbb.mp4"],
    cache_examples=False,
)

image_interface = gr.Interface(
    fn=inference.deepfakes_image_predict,
    inputs=gr.Image(label="Upload an Image"),
    outputs=gr.Textbox(label="Prediction"),
    examples=["images/lady.jpg", "images/fake_image.jpg"],
    cache_examples=False,
)

audio_interface = gr.Interface(
    fn=inference.deepfakes_spec_predict,
    inputs=gr.Audio(label="Upload an Audio"),
    outputs=gr.Textbox(label="Prediction"),
    examples=["audios/DF_E_2000027.flac", "audios/DF_E_2000031.flac"],
    cache_examples=False,
)

ai_image_detector = gr.Interface(
    fn=inference.detect_ai_generated_image,
    inputs=gr.Image(label="Upload an Image"),
    outputs=gr.Textbox(label="AI-Generated or Human-Created"),
    examples=["images/ai_generated.jpg", "images/real.jpeg"],
    cache_examples=False,
)

# Full UI: title banner plus one tab per interface.
with gr.Blocks(title=title) as app:
    gr.Markdown(f"# {title}")
    gr.Markdown(description)

    for tab_label, tab_interface in (
        ("🎬 Video Inference", video_interface),
        ("🎧 Audio Inference", audio_interface),
        ("🖼️ Image Inference", image_interface),
        ("🤖 AI Image Detector", ai_image_detector),
    ):
        with gr.Tab(tab_label):
            tab_interface.render()

if __name__ == '__main__':
    app.launch(share=False)
import torch.utils.data


class DataProvider():
    """Thin wrapper that pairs a dataset with a configured DataLoader.

    ``len(provider)`` is the number of samples (not batches); iterating the
    provider yields batches from the underlying DataLoader.
    """

    def __init__(self, cfg, dataset, batch_size=None, shuffle=True):
        super().__init__()
        self.dataset = dataset
        # Fall back to the configured batch size when none is given explicitly.
        effective_batch = cfg.BATCH_SIZE if batch_size is None else batch_size
        self.dataloader = torch.utils.data.DataLoader(
            self.dataset,
            batch_size=effective_batch,
            shuffle=shuffle,
            num_workers=int(cfg.WORKERS),
            drop_last=False,
        )

    def __len__(self):
        # Sample count, mirroring the wrapped dataset.
        return len(self.dataset)

    def __iter__(self):
        # Delegate iteration straight to the DataLoader.
        yield from self.dataloader
import cv2
import librosa
import numpy as np
import albumentations
from albumentations import (Compose, ImageCompression, GaussNoise, HorizontalFlip,
                            PadIfNeeded, OneOf, ToGray, ShiftScaleRotate, GaussianBlur,
                            RandomBrightnessContrast, FancyPCA, HueSaturationValue,
                            BasicTransform)


class AudioTransform(BasicTransform):
    """Base transform for audio tasks.

    Overrides ``targets`` so albumentations feeds the raw waveform to
    ``apply`` under the ``data`` key — i.e. pipelines built from these
    transforms must be called as ``pipeline(data=waveform)``.
    """

    @property
    def targets(self):
        return {"data": self.apply}

    def update_params(self, params, **kwargs):
        # Propagate optional per-transform attributes into the call params.
        if hasattr(self, "interpolation"):
            params["interpolation"] = self.interpolation
        if hasattr(self, "fill_value"):
            params["fill_value"] = self.fill_value
        return params


class TimeShifting(AudioTransform):
    """Randomly shift the waveform in time, filling the gap with low-amplitude noise."""

    def __init__(self, always_apply=False, p=0.5):
        super(TimeShifting, self).__init__(always_apply, p)

    def apply(self, data, **params):
        '''
        data : ndarray of audio timeseries
        '''
        # Shift by up to +/-80000 samples (5 s at the assumed 16 kHz rate).
        start_ = int(np.random.uniform(-80000, 80000))
        if start_ >= 0:
            audio_time_shift = np.r_[data[start_:], np.random.uniform(-0.001, 0.001, start_)]
        else:
            audio_time_shift = np.r_[np.random.uniform(-0.001, 0.001, -start_), data[:start_]]
        return audio_time_shift


class PitchShift(AudioTransform):
    """Pitch-shift the waveform by ``n_steps`` semitones (assumes 16 kHz audio)."""
    # FIX: docstring previously said "Do time shifting of audio" — a copy-paste
    # from TimeShifting; this transform shifts pitch, not time.

    def __init__(self, always_apply=False, p=0.5, n_steps=None):
        super(PitchShift, self).__init__(always_apply, p)
        # n_steps is the number of semitones to shift by.
        self.n_steps = n_steps

    def apply(self, data, **params):
        '''
        data : ndarray of audio timeseries
        '''
        return librosa.effects.pitch_shift(data, sr=16000, n_steps=self.n_steps)


class AddGaussianNoise(AudioTransform):
    """Add low-amplitude white Gaussian noise to the waveform."""
    # FIX: docstring previously said "Do time shifting of audio" (copy-paste).

    def __init__(self, always_apply=False, p=0.5):
        super(AddGaussianNoise, self).__init__(always_apply, p)

    def apply(self, data, **params):
        '''
        data : ndarray of audio timeseries
        '''
        noise = np.random.randn(len(data))
        data_wn = data + 0.005 * noise
        return data_wn


# Frame-level augmentations for face crops (expects an HWC uint8 image under
# the ``image`` key).
create_frame_transforms = Compose([
    ImageCompression(quality_lower=60, quality_upper=100, p=0.5),
    GaussNoise(p=0.1),
    GaussianBlur(blur_limit=3, p=0.05),
    HorizontalFlip(),
    PadIfNeeded(min_height=256, min_width=256, border_mode=cv2.BORDER_CONSTANT),
    OneOf([RandomBrightnessContrast(), FancyPCA(), HueSaturationValue()], p=0.7),
    ToGray(p=0.2),
    ShiftScaleRotate(shift_limit=0.1, scale_limit=0.2, rotate_limit=10,
                     border_mode=cv2.BORDER_CONSTANT, p=0.5),
])


# Waveform-level augmentations; p < 1.0 so the net still sees some clean audio.
# Call as create_spec_transforms(data=waveform) per AudioTransform.targets.
create_spec_transforms = albumentations.Compose([
    TimeShifting(p=0.9),
    AddGaussianNoise(p=0.8),
    PitchShift(p=0.5, n_steps=4),
])
FEATURE_DESCRIPTION = {
    'video_path': tf.io.FixedLenFeature([], tf.string),
    'image/encoded': tf.io.FixedLenFeature([], tf.string),
    'clip/label/index': tf.io.FixedLenFeature([], tf.int64),
    'clip/label/text': tf.io.FixedLenFeature([], tf.string),
    'WAVEFORM/feature/floats': tf.io.FixedLenFeature([], tf.string)
}


@tf.function
def _parse_function(example_proto):
    """Parse one serialized tf.train.Example into (video, spectrogram, label index)."""
    example = tf.io.parse_single_example(example_proto, FEATURE_DESCRIPTION)

    video = tf.io.decode_raw(example['image/encoded'], tf.int8)
    spectrogram = tf.io.decode_raw(example['WAVEFORM/feature/floats'], tf.float32)
    label_map = example["clip/label/index"]

    return video, spectrogram, label_map


@tf.function
def decode_inputs(video, spectrogram, label_map):
    '''Decode tensors to arrays with desired shape (validation path, no augmentation).'''
    # 10 frames stored as CHW 3x256x256; keep the first frame, scaled to [0, 1].
    frame = tf.reshape(video, [10, 3, 256, 256])
    frame = frame[0] / 255

    label_map = tf.expand_dims(label_map, axis=0)

    sample = {'video_reshaped': frame, 'spectrogram': spectrogram, 'label_map': label_map}
    return sample


def decode_train_inputs(video, spectrogram, label_map):
    '''Decode tensors and apply audio/image augmentation (training path).'''
    # Augment the waveform outside the graph; restore the static shape lost
    # by tf.py_function.
    spectrogram_shape = spectrogram.shape
    spec_augmented = tf.py_function(aug_spec_fn, [spectrogram], tf.float32)
    spec_augmented.set_shape(spectrogram_shape)

    # NOTE(review): this path reshapes to HWC [10, 256, 256, 3] while the
    # validation path uses CHW [10, 3, 256, 256] — confirm which layout the
    # tfrecords actually store. Also, dividing by 255 before aug_img_fn casts
    # the result back to uint8, which collapses values to 0/1 — likely the
    # normalization should happen after augmentation; preserved as-is.
    frame = tf.reshape(video, [10, 256, 256, 3])
    frame = frame[0]
    frame = frame / 255

    frame_augmented = tf.py_function(aug_img_fn, [frame], tf.uint8)
    frame_augmented.set_shape([3, 256, 256])

    label_map = tf.expand_dims(label_map, axis=0)

    augmented_sample = {'video_reshaped': frame_augmented, 'spectrogram': spec_augmented, 'label_map': label_map}
    return augmented_sample


def aug_img_fn(frame):
    """Run albumentations frame transforms on an HWC frame; return CHW uint8."""
    frame = frame.numpy().astype(np.uint8)
    frame_data = {'image': frame}
    aug_frame_data = create_frame_transforms(**frame_data)
    aug_img = aug_frame_data['image']
    aug_img = aug_img.transpose(2, 0, 1)
    return aug_img


def aug_spec_fn(spec):
    """Run waveform transforms on a 1-D audio array.

    BUG FIX: AudioTransform registers its target under the key ``data``, so
    the waveform must be passed and read back as ``data``; the previous
    ``spec`` key never reached the transforms.
    """
    spec = spec.numpy()
    aug_spec_data = create_spec_transforms(data=spec)
    aug_spec = aug_spec_data['data']
    return aug_spec


class FakeAVCelebDatasetTrain:
    """Training split: shuffled tfrecord shards with augmentation applied."""

    def __init__(self, args):
        self.args = args
        self.samples = self.load_features_from_tfrec()

    def load_features_from_tfrec(self):
        '''Loads raw features from tfrecord shards and returns a batched tf.data pipeline.'''
        ds = tf.io.matching_files(self.args.data_dir)
        files = tf.random.shuffle(ds)

        shards = tf.data.Dataset.from_tensor_slices(files)
        dataset = shards.interleave(tf.data.TFRecordDataset)
        dataset = dataset.shuffle(buffer_size=100)

        dataset = dataset.map(_parse_function, num_parallel_calls=tf.data.AUTOTUNE)
        dataset = dataset.map(decode_train_inputs, num_parallel_calls=tf.data.AUTOTUNE)
        dataset = dataset.padded_batch(batch_size=self.args.batch_size)
        return dataset

    def __len__(self):
        # BUG FIX: the old code called load_features_from_tfrec(self.args.data_dir),
        # but the method takes no positional arguments, so len() always raised
        # TypeError. Count batches in the pipeline built at construction time.
        cnt = self.samples.reduce(np.int64(0), lambda x, _: x + 1)
        return int(cnt.numpy())


class FakeAVCelebDatasetVal:
    """Validation split: same pipeline as training but without augmentation."""

    def __init__(self, args):
        self.args = args
        self.samples = self.load_features_from_tfrec()

    def load_features_from_tfrec(self):
        '''Loads raw features from tfrecord shards and returns a batched tf.data pipeline.'''
        ds = tf.io.matching_files(self.args.data_dir)
        files = tf.random.shuffle(ds)

        shards = tf.data.Dataset.from_tensor_slices(files)
        dataset = shards.interleave(tf.data.TFRecordDataset)
        dataset = dataset.shuffle(buffer_size=100)

        dataset = dataset.map(_parse_function, num_parallel_calls=tf.data.AUTOTUNE)
        dataset = dataset.map(decode_inputs, num_parallel_calls=tf.data.AUTOTUNE)
        dataset = dataset.padded_batch(batch_size=self.args.batch_size)
        return dataset

    def __len__(self):
        # BUG FIX: same TypeError as the training class — drop the bogus argument.
        cnt = self.samples.reduce(np.int64(0), lambda x, _: x + 1)
        return int(cnt.numpy())
"""Python script to generate TFRecords of SequenceExample from raw videos."""

import contextlib
import math
import os
import cv2
from typing import Dict, Optional, Sequence
import moviepy.editor
from absl import app
from absl import flags
import ffmpeg
import numpy as np
import pandas as pd
import tensorflow as tf

import warnings
warnings.filterwarnings('ignore')

flags.DEFINE_string("csv_path", "fakeavceleb_1k.csv", "Input csv")
flags.DEFINE_string("output_path", "fakeavceleb_tfrec", "Tfrecords output path.")
flags.DEFINE_string("video_root_path", "./",
                    "Root directory containing the raw videos.")
flags.DEFINE_integer(
    "num_shards", 4, "Number of shards to output, -1 means"
    "it will automatically adapt to the sqrt(num_examples).")
flags.DEFINE_bool("decode_audio", False, "Whether or not to decode the audio")
flags.DEFINE_bool("shuffle_csv", False, "Whether or not to shuffle the csv.")
FLAGS = flags.FLAGS


_JPEG_HEADER = b"\xff\xd8"


@contextlib.contextmanager
def _close_on_exit(writers):
    """Call close on all writers on exit."""
    try:
        yield writers
    finally:
        for writer in writers:
            writer.close()


def add_float_list(key: str, values: Sequence[float],
                   sequence: tf.train.SequenceExample):
    """Append a float-list feature under `key` to the sequence's feature lists."""
    sequence.feature_lists.feature_list[key].feature.add(
    ).float_list.value[:] = values


def add_bytes_list(key: str, values: Sequence[bytes],
                   sequence: tf.train.SequenceExample):
    """Append a bytes-list feature under `key` to the sequence's feature lists."""
    sequence.feature_lists.feature_list[key].feature.add().bytes_list.value[:] = values


def add_int_list(key: str, values: Sequence[int],
                 sequence: tf.train.SequenceExample):
    """Append an int64-list feature under `key` to the sequence's feature lists."""
    sequence.feature_lists.feature_list[key].feature.add().int64_list.value[:] = values


def set_context_int_list(key: str, value: Sequence[int],
                         sequence: tf.train.SequenceExample):
    """Set an int64-list context feature."""
    sequence.context.feature[key].int64_list.value[:] = value


def set_context_bytes(key: str, value: bytes,
                      sequence: tf.train.SequenceExample):
    """Set a single-bytes context feature."""
    sequence.context.feature[key].bytes_list.value[:] = (value,)


def set_context_bytes_list(key: str, value: Sequence[bytes],
                           sequence: tf.train.SequenceExample):
    """Set a bytes-list context feature."""
    sequence.context.feature[key].bytes_list.value[:] = value


def set_context_float(key: str, value: float,
                      sequence: tf.train.SequenceExample):
    """Set a single-float context feature."""
    sequence.context.feature[key].float_list.value[:] = (value,)


def set_context_int(key: str, value: int, sequence: tf.train.SequenceExample):
    """Set a single-int64 context feature."""
    sequence.context.feature[key].int64_list.value[:] = (value,)


def extract_frames(video_path, fps=10, min_resize=256):
    '''Sample frames from a video and return them as raw RGB bytes.

    NOTE(review): despite its name, `fps` is the *number* of frames sampled
    evenly across the clip (np.linspace), not a frame rate — confirm intent.
    '''
    v_cap = cv2.VideoCapture(video_path)
    v_len = int(v_cap.get(cv2.CAP_PROP_FRAME_COUNT))

    if fps is None:
        sample = np.arange(0, v_len)
    else:
        sample = np.linspace(0, v_len - 1, fps).astype(int)

    frames = []
    for j in range(v_len):
        success = v_cap.grab()
        if j in sample:
            success, frame = v_cap.retrieve()
            if not success:
                continue

            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            frame = cv2.resize(frame, (min_resize, min_resize))
            frames.append(frame)

    v_cap.release()
    frame_np = np.stack(frames)
    return frame_np.tobytes()


def extract_audio(video_path: str,
                  sampling_rate: int = 16_000):
    """Extract raw mono audio float array from video_path via moviepy.

    NOTE(review): `sampling_rate` is currently unused — to_soundarray() keeps
    the clip's native rate; confirm whether resampling was intended.
    """
    video = moviepy.editor.VideoFileClip(video_path)
    audio = video.audio.to_soundarray()
    # Keep only the first channel (mono).
    audio = audio[:, 0]

    return np.array(audio)


def serialize_example(video_path: str, label_name: str, label_map: Optional[Dict[str, int]] = None):
    """Build a SequenceExample with frames, audio and label context features.

    NOTE(review): label_map must be provided when the csv has labels;
    `label_map[label_name]` raises TypeError if it is None.
    """
    seq_example = tf.train.SequenceExample()

    imgs_encoded = extract_frames(video_path, fps=10)
    audio = extract_audio(video_path)

    # FIX: was f'image/encoded' — an f-string with no placeholders.
    set_context_bytes('image/encoded', imgs_encoded, seq_example)
    set_context_bytes("video_path", video_path.encode(), seq_example)
    set_context_bytes("WAVEFORM/feature/floats", audio.tobytes(), seq_example)
    set_context_int("clip/label/index", label_map[label_name], seq_example)
    set_context_bytes("clip/label/text", label_name.encode(), seq_example)
    return seq_example


def main(argv):
    del argv
    # Read the input csv listing (video_path, label) rows.
    input_csv = pd.read_csv(FLAGS.csv_path)
    if FLAGS.num_shards == -1:
        num_shards = int(math.sqrt(len(input_csv)))
    else:
        num_shards = FLAGS.num_shards
    # Set up the TFRecordWriters, one per output shard.
    basename = os.path.splitext(os.path.basename(FLAGS.csv_path))[0]
    shard_names = [
        os.path.join(FLAGS.output_path, f"{basename}-{i:05d}-of-{num_shards:05d}")
        for i in range(num_shards)
    ]
    writers = [tf.io.TFRecordWriter(shard_name) for shard_name in shard_names]

    if "label" in input_csv:
        unique_labels = list(set(input_csv["label"].values))
        l_map = {unique_labels[i]: i for i in range(len(unique_labels))}
    else:
        l_map = None

    if FLAGS.shuffle_csv:
        input_csv = input_csv.sample(frac=1)
    with _close_on_exit(writers) as writers:
        row_count = 0
        for row in input_csv.itertuples():
            index = row[0]
            v = row[1]
            if os.name == 'posix':
                # BUG FIX: `v` is a plain str here, not a pandas Series —
                # the old `v.str.replace(...)` raised AttributeError.
                v = v.replace('\\', '/')
            l = row[2]
            row_count += 1
            print("Processing example %d of %d (%d%%) \r" % (row_count, len(input_csv), row_count * 100 / len(input_csv)), end="")
            seq_ex = serialize_example(video_path=v, label_name=l, label_map=l_map)
            # Round-robin rows across shards by csv index.
            writers[index % len(writers)].write(seq_ex.SerializeToString())


if __name__ == "__main__":
    app.run(main)
b/datasets/fakeavceleb_100.csv new file mode 100644 index 0000000000000000000000000000000000000000..84ff25a832d9e0e2049b8b410f1ebeb1afbf44f4 --- /dev/null +++ b/datasets/fakeavceleb_100.csv @@ -0,0 +1,101 @@ +video_path,label +FakeAVCeleb/RealVideo-RealAudio/African/men/id00076/00109.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00166/00010.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00173/00118.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00366/00118.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00391/00052.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00475/00099.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00476/00109.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00478/00206.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00518/00031.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00701/00092.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00761/00072.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00781/00092.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00830/00143.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00944/00135.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id00987/00160.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01036/00010.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01076/00005.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01170/00021.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01171/00053.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01179/00160.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01207/00320.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01236/00005.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01392/00167.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01452/00001.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01521/00109.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01528/00017.mp4,real 
+FakeAVCeleb/RealVideo-RealAudio/African/men/id01530/00002.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01544/00044.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01597/00005.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01598/00044.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01610/00090.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01637/00002.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01691/00045.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01717/00005.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01779/00010.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01835/00130.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01856/00006.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01920/00099.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01933/00028.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01972/00078.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id01995/00071.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02005/00052.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02040/00476.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02051/00015.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02268/00036.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02296/00019.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02316/00094.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02342/00191.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id02494/00050.mp4,real +FakeAVCeleb/RealVideo-RealAudio/African/men/id04727/00007.mp4,real +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_10_id00476_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_10_id01076_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_10_id01179_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_10_id02005_wavtolip.mp4,fake 
+FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_10_id02342_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_12_id00518_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_12_id00761_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_12_id00987_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_12_id01856_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_12_id02296_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_2_id00166_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_2_id00701_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_2_id01236_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_2_id01521_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_2_id01598_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_4_id01392_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_4_id01528_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_4_id01691_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_4_id01995_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_4_id02296_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_7_id00166_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_7_id00478_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_7_id01452_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_7_id01717_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_7_id01995_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_8_id00166_wavtolip.mp4,fake 
+FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_8_id00701_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_8_id00761_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_8_id01170_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_8_id02005_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_9_id00076_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_9_id01036_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_9_id01452_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_9_id01528_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00076/00109_9_id02005_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00166/00010_id01637_5VjcPZm8knM_faceswap_id00166_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00166/00010_id01637_5VjcPZm8knM_faceswap_id00761_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00166/00010_id01637_5VjcPZm8knM_faceswap_id01171_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00166/00010_id01637_5VjcPZm8knM_faceswap_id01530_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00166/00010_id01637_5VjcPZm8knM_faceswap_id01598_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00173/00118_id00476_UgdYVJ6xPYg_faceswap_id00166_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00173/00118_id00476_UgdYVJ6xPYg_faceswap_id00173_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00173/00118_id00476_UgdYVJ6xPYg_faceswap_id01530_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00173/00118_id00476_UgdYVJ6xPYg_faceswap_id01598_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00173/00118_id00476_UgdYVJ6xPYg_faceswap_id01779_wavtolip.mp4,fake 
+FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00366/00118_id00076_Isiq7cA-DNE_faceswap_id01170_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00366/00118_id00076_Isiq7cA-DNE_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00366/00118_id00076_Isiq7cA-DNE_faceswap_id02316_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00366/00118_id00076_Isiq7cA-DNE_faceswap_id02342_wavtolip.mp4,fake +FakeAVCeleb/FakeVideo-FakeAudio/African/men/id00366/00118_id00076_Isiq7cA-DNE_faceswap_id02494_wavtolip.mp4,fake diff --git a/datasets/fakeavceleb_1k.csv b/datasets/fakeavceleb_1k.csv new file mode 100644 index 0000000000000000000000000000000000000000..cd2f1857dd17ac0231aeb8c77f5cb81a9dee350a --- /dev/null +++ b/datasets/fakeavceleb_1k.csv @@ -0,0 +1,1001 @@ +video_path,label +FakeAVCeleb\RealVideo-RealAudio\African\men\id00076\00109.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00166\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00173\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00366\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00391\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00475\00099.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00476\00109.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00478\00206.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00518\00031.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00701\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00761\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00781\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00830\00143.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00944\00135.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id00987\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01036\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01076\00005.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\African\men\id01170\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01171\00053.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01179\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01207\00320.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01236\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01392\00167.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01452\00001.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01521\00109.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01528\00017.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01530\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01544\00044.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01597\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01598\00044.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01610\00090.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01637\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01691\00045.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01717\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01779\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01835\00130.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01856\00006.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01920\00099.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01933\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01972\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id01995\00071.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id02005\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id02040\00476.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id02051\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id02268\00036.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id02296\00019.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\African\men\id02316\00094.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id02342\00191.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id02494\00050.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\men\id04727\00007.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00220\00027.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00359\00053.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00371\00099.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00460\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00568\00384.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00577\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00592\00017.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00707\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00829\00271.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id00832\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id01178\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id01532\00065.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id01661\00059.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id01783\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id01838\00126.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id01907\00148.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02071\00195.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02301\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02508\00083.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02586\00042.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02617\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02721\00424.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02808\00056.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02824\00130.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\African\women\id02838\00080.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id02948\00298.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id03103\00130.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id03569\00065.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id03656\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id03658\00077.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id03713\00249.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id03747\00273.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04055\00001.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04245\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04374\00032.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04376\00181.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04437\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04540\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04547\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04689\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04705\00408.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04736\00083.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04820\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id04939\00174.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id05106\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id05231\00149.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id05235\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id05251\00033.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id05252\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\African\women\id05980\00143.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00018\00181.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00020\00206.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00021\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00029\00288.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00049\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00052\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00060\00307.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00062\00278.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00087\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00088\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00169\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00179\00143.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00184\00241.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00243\00037.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00264\00257.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00345\00243.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00529\00409.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00696\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00708\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00775\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00777\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00943\00304.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00945\00107.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id00971\00253.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01035\00012.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01042\00154.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian 
(American)\men\id01044\00336.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01048\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01096\00037.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01105\00083.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01124\00063.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01163\00195.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01168\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01172\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01175\00025.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01182\00167.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01192\00217.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01201\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01210\00283.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01211\00023.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id01239\00280.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id03525\00048.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id03668\00143.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id03678\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id03757\00149.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id04034\00009.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id04073\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id04216\00470.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id04219\00130.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\men\id04221\00053.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00025\00025.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00097\00162.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00098\00004.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00100\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00145\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00180\00206.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00190\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00231\00037.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00261\00048.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00272\00195.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00291\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00381\00030.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00383\00171.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00385\00439.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00398\00016.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00418\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00428\00017.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00431\00039.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00458\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00462\00143.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00555\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00575\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00616\00305.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00618\00195.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00680\00110.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00752\00340.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00835\00195.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00841\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00842\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id00848\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01004\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01005\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01075\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01091\00236.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01216\00025.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01217\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01223\00255.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01225\00300.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01227\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01231\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01238\00037.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01245\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id01248\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id02464\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id02466\00136.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id03556\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id03605\00048.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id03696\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id03707\00055.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Caucasian (American)\women\id03781\00113.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id00056\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id00126\00173.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id00560\00041.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id00597\00019.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id00740\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id00863\00069.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id01204\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id01212\00183.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id01215\00001.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id01589\00017.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id01683\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id02332\00055.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id02365\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id02493\00073.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id02553\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id02561\02561.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id03028\00466.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id03168\03168.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id03889\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id03965\00051.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04111\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04222\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04687\00066.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04691\1.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04726\00245.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04774\00032.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04789\002121.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id04884\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id05268\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id05332\00065.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id05383\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id05479\05479.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id05743\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06152\06152.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06269\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06467\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06470\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06535\00183.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06591\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06594\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06776\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06807\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id06878\00001.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id07102\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id07338\00003.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id08299\00110.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id08613\00074.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id08652\00006.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id09053\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\men\id09143\00056.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00137\00025.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00363\00014.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00430\00209.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00566\00032.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00579\00030.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00582\00006.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00763\00074.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id00935\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id01281\00040.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id01451\00099.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id02587\00020.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id02807\00032.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id03211\00032.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id03379\00032.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id03940\00025.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id04057\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id04066\00013.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id04144\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id04414\00001.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id04701\00017.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id05576\00368.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id05620\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id05631\00073.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id05844\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06054\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06060\00219.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06061\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06065\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06066\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian 
(East)\women\id06158\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06225\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06388\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06427\00138.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06443\00232.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id06462\00014.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id07039\00105.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id07383\00011.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id07739\00019.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id07799\00063.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id07901\00040.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id08139\00067.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id08397\00167.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id08402\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id08819\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id09116\00026.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id09125\00098.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id09171\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id09174\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id09175\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (East)\women\id09181\00048.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00055\00120.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00063\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00171\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00183\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00185\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian 
(European)\men\id00186\00120.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00187\00360.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00192\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00225\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00241\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00253\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00265\00130.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00266\00470.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00282\00268.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00292\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00305\00113.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00306\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00358\00217.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00368\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00415\00017.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00498\00014.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00519\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00520\00187.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00535\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00548\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00554\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00559\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00594\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00694\00340.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00709\00206.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00909\00037.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00919\00063.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00946\00126.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00963\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00981\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00990\00160.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id00999\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01051\00322.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01052\00076.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01058\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01098\00044.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01099\00206.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01102\00197.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01123\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01126\00040.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01154\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01156\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id01157\00048.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id02567\00040.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\men\id03205\00150.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00042\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00068\00004.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00071\00014.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00188\00020.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian 
(European)\women\id00232\00025.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00234\00063.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00262\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00270\00088.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00271\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00287\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00325\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00328\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00330\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00332\00293.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00365\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00373\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00374\00311.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00379\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00395\00420.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00403\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00434\00046.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00484\00202.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00491\00122.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00495\00027.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00496\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00569\00239.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00581\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00589\00130.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian 
(European)\women\id00591\00001.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00633\00088.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00634\00078.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00735\00037.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00806\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00813\00169.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00823\00125.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00826\00065.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id00897\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id01001\00086.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id01002\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id01018\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03371\00430.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03589\00002.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03620\00081.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03649\00001.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03651\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03716\00040.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03816\00093.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03844\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03858\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Caucasian (European)\women\id03941\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00032\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00033\00276.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian 
(South)\men\id00078\00114.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00082\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00103\00241.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00350\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00414\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00459\00382.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00685\00146.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00732\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00745\00165.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00769\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00773\00038.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00816\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00857\00347.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id00860\00154.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id03180\00039.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id03344\00114.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id03599\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id03945\00063.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04526\00317.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04537\00083.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04554\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04560\00195.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04561\00248.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04562\00221.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04599\00111.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04601\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id04928\00027.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian 
(South)\men\id06334\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id06354\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id06355\00347.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id06753\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07058\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07108\00412.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07161\00159.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07163\00141.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07165\00368.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07179\00206.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07182\00040.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07194\00014.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07195\00186.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07200\00045.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07210\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07233\00010.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07463\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id07768\00143.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id08313\00202.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id08314\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\men\id08457\00417.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00043\00135.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00080\00281.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00149\00284.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00235\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00417\00069.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00461\00043.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00488\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00739\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id00747\00053.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id01026\00083.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id02089\00092.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id02310\00139.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id02619\00015.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id03559\00023.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id03815\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id03897\00021.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id03985\00005.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04070\00072.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04490\00054.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04529\00186.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04530\00231.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04564\00417.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04582\00180.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04583\00077.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id04927\00013.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id05434\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id05435\00107.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id05478\00135.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id05845\00027.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id05920\00161.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id05931\00013.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06232\00025.mp4,real 
+FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06254\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06268\00159.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06343\00023.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06428\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06437\00028.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06438\00110.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06439\00118.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06445\00150.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id06752\00221.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07008\00175.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07049\00043.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07051\00083.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07078\00405.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07136\00052.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07236\00143.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07377\00025.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07686\00254.mp4,real +FakeAVCeleb\RealVideo-RealAudio\Asian (South)\women\id07689\00028.mp4,real +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_10_id00476_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_10_id01076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_10_id01179_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_10_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_10_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_12_id00518_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_12_id00761_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_12_id00987_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_12_id01856_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_12_id02296_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_2_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_2_id00701_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_2_id01236_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_2_id01521_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_2_id01598_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_4_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_4_id01528_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_4_id01691_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_4_id01995_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_4_id02296_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_7_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_7_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_7_id01452_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_7_id01717_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_7_id01995_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_8_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_8_id00701_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_8_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_8_id01170_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_8_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_9_id00076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_9_id01036_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_9_id01452_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_9_id01528_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00076\00109_9_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00166\00010_id01637_5VjcPZm8knM_faceswap_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00166\00010_id01637_5VjcPZm8knM_faceswap_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00166\00010_id01637_5VjcPZm8knM_faceswap_id01171_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00166\00010_id01637_5VjcPZm8knM_faceswap_id01530_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00166\00010_id01637_5VjcPZm8knM_faceswap_id01598_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00173\00118_id00476_UgdYVJ6xPYg_faceswap_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00173\00118_id00476_UgdYVJ6xPYg_faceswap_id00173_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00173\00118_id00476_UgdYVJ6xPYg_faceswap_id01530_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00173\00118_id00476_UgdYVJ6xPYg_faceswap_id01598_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00173\00118_id00476_UgdYVJ6xPYg_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00366\00118_id00076_Isiq7cA-DNE_faceswap_id01170_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00366\00118_id00076_Isiq7cA-DNE_faceswap_id01779_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00366\00118_id00076_Isiq7cA-DNE_faceswap_id02316_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00366\00118_id00076_Isiq7cA-DNE_faceswap_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00366\00118_id00076_Isiq7cA-DNE_faceswap_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00391\00052_id00476_UgdYVJ6xPYg_faceswap_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00391\00052_id00476_UgdYVJ6xPYg_faceswap_id01179_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00391\00052_id00476_UgdYVJ6xPYg_faceswap_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00391\00052_id00476_UgdYVJ6xPYg_faceswap_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00391\00052_id00476_UgdYVJ6xPYg_faceswap_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00475\00099_0_id01530_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00475\00099_0_id01920_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00475\00099_0_id01972_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00475\00099_0_id02316_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00475\00099_0_id04727_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00476_UgdYVJ6xPYg_faceswap_id00076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00476_UgdYVJ6xPYg_faceswap_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00476_UgdYVJ6xPYg_faceswap_id00781_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00476_UgdYVJ6xPYg_faceswap_id00830_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00476_UgdYVJ6xPYg_faceswap_id01207_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00781_fvsSae9yc0A_faceswap_id00476_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00781_fvsSae9yc0A_faceswap_id00944_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00781_fvsSae9yc0A_faceswap_id01597_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00781_fvsSae9yc0A_faceswap_id01691_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00476\00109_id00781_fvsSae9yc0A_faceswap_id04727_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00478\00109_11_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00478\00109_11_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00478\00109_11_id01856_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00478\00109_11_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00478\00109_11_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_0_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_0_id00391_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_0_id00830_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_0_id01170_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_0_id02268_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_1_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_1_id00987_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_1_id01076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_1_id01207_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_1_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_2_id01544_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_2_id01598_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_2_id01717_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_2_id01835_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_2_id02296_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_3_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_3_id01528_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_3_id01691_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_3_id02040_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00518\00031_3_id02268_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00701\00092_id01036_AohKaMtIHxA_faceswap_id00391_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00701\00092_id01036_AohKaMtIHxA_faceswap_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00701\00092_id01036_AohKaMtIHxA_faceswap_id01528_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00701\00092_id01036_AohKaMtIHxA_faceswap_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00701\00092_id01036_AohKaMtIHxA_faceswap_id01972_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00761\00072_id01835_UZbWA0QfXXA_faceswap_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00761\00072_id01835_UZbWA0QfXXA_faceswap_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00761\00072_id01835_UZbWA0QfXXA_faceswap_id01036_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00761\00072_id01835_UZbWA0QfXXA_faceswap_id01528_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00761\00072_id01835_UZbWA0QfXXA_faceswap_id01717_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00781\00092_id00476_UgdYVJ6xPYg_faceswap_id01170_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00781\00092_id00476_UgdYVJ6xPYg_faceswap_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00781\00092_id00476_UgdYVJ6xPYg_faceswap_id01972_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00781\00092_id00476_UgdYVJ6xPYg_faceswap_id01995_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00781\00092_id00476_UgdYVJ6xPYg_faceswap_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00830\00143_id00076_Isiq7cA-DNE_faceswap_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00830\00143_id00076_Isiq7cA-DNE_faceswap_id01207_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00830\00143_id00076_Isiq7cA-DNE_faceswap_id01544_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00830\00143_id00076_Isiq7cA-DNE_faceswap_id01920_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00944\00135_id01528_SBAS9Kcb8QY_faceswap_id01179_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00987\00160_id02005_7_Egh9mW5y4_faceswap_id01236_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00987\00160_id02005_7_Egh9mW5y4_faceswap_id01528_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00987\00160_id02005_7_Egh9mW5y4_faceswap_id01691_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00987\00160_id02005_7_Egh9mW5y4_faceswap_id02040_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id00987\00160_id02005_7_Egh9mW5y4_faceswap_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01036\00010_id00701_lW6uzLIOwd0_faceswap_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01036\00010_id00701_lW6uzLIOwd0_faceswap_id01171_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01036\00010_id00701_lW6uzLIOwd0_faceswap_id01530_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01036\00010_id00701_lW6uzLIOwd0_faceswap_id01597_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01076\00005_id01207_mt129WTRSII_faceswap_id00391_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01076\00005_id01207_mt129WTRSII_faceswap_id00781_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01076\00005_id01207_mt129WTRSII_faceswap_id01530_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01076\00005_id01207_mt129WTRSII_faceswap_id02040_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01076\00005_id01207_mt129WTRSII_faceswap_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01170\00021_id01933_I5XXxgK7QpE_faceswap_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01170\00021_id01933_I5XXxgK7QpE_faceswap_id01597_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01170\00021_id01933_I5XXxgK7QpE_faceswap_id01637_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01170\00021_id01933_I5XXxgK7QpE_faceswap_id01856_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01171\00053_id02494_lObg47hQleE_faceswap_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01171\00053_id02494_lObg47hQleE_faceswap_id00476_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01171\00053_id02494_lObg47hQleE_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01171\00053_id02494_lObg47hQleE_faceswap_id01835_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01171\00053_id02494_lObg47hQleE_faceswap_id02051_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01179\00160_id02005_7_Egh9mW5y4_faceswap_id01528_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01179\00160_id02005_7_Egh9mW5y4_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01179\00160_id02005_7_Egh9mW5y4_faceswap_id01835_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01179\00160_id02005_7_Egh9mW5y4_faceswap_id01972_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01179\00160_id02005_7_Egh9mW5y4_faceswap_id02316_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01207\00320_id00076_Isiq7cA-DNE_faceswap_id00366_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01207\00320_id00076_Isiq7cA-DNE_faceswap_id00701_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01207\00320_id00076_Isiq7cA-DNE_faceswap_id01530_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01207\00320_id00076_Isiq7cA-DNE_faceswap_id01597_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01207\00320_id00076_Isiq7cA-DNE_faceswap_id04727_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01236\00005_id01610_l8zb_iaDJJA_faceswap_id00366_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01236\00005_id01610_l8zb_iaDJJA_faceswap_id00830_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01236\00005_id01610_l8zb_iaDJJA_faceswap_id01076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01236\00005_id01610_l8zb_iaDJJA_faceswap_id01691_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01236\00005_id01610_l8zb_iaDJJA_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_2_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_2_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_2_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_2_id01207_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_2_id01521_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_3_id00830_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_3_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_3_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_3_id01933_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01392\00167_3_id02040_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01452\00001_id01528_SBAS9Kcb8QY_faceswap_id00173_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01452\00001_id01528_SBAS9Kcb8QY_faceswap_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01452\00001_id01528_SBAS9Kcb8QY_faceswap_id00987_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01452\00001_id01528_SBAS9Kcb8QY_faceswap_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01452\00001_id01528_SBAS9Kcb8QY_faceswap_id01717_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01528\00017_id01452_4MqeoSxSy3w_faceswap_id00701_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01528\00017_id01452_4MqeoSxSy3w_faceswap_id01076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01528\00017_id01452_4MqeoSxSy3w_faceswap_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01528\00017_id01452_4MqeoSxSy3w_faceswap_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01528\00017_id01452_4MqeoSxSy3w_faceswap_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01544\00044_3_id00166_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01544\00044_3_id00173_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01544\00044_3_id00830_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01544\00044_3_id01530_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01544\00044_3_id02268_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_0_id00076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_1_id01171_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_1_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_1_id01544_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_1_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_1_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_2_id00987_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_2_id01236_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_2_id01995_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_2_id02040_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_2_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_3_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_3_id00781_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_3_id01528_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_3_id01920_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01597\00005_3_id02268_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01598\00109_1_id01637_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01598\00109_1_id01691_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01598\00109_1_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01598\00109_1_id02051_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01598\00109_1_id02316_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01610\00090_id01236_7WdumGR5-JM_faceswap_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01610\00090_id01236_7WdumGR5-JM_faceswap_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01610\00090_id01236_7WdumGR5-JM_faceswap_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01610\00090_id01236_7WdumGR5-JM_faceswap_id02040_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01610\00090_id01236_7WdumGR5-JM_faceswap_id02051_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_1_id01598_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_1_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_1_id01920_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_1_id02296_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_1_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_2_id00701_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_2_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_2_id00944_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_2_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01637\00002_2_id01452_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01691\00045_id01779_HgyHpDEo_jk_faceswap_id00830_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01691\00045_id01779_HgyHpDEo_jk_faceswap_id01236_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01691\00045_id01779_HgyHpDEo_jk_faceswap_id02040_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01691\00045_id01779_HgyHpDEo_jk_faceswap_id02268_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01717\00005_id02005_7_Egh9mW5y4_faceswap_id01170_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01717\00005_id02005_7_Egh9mW5y4_faceswap_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01717\00005_id02005_7_Egh9mW5y4_faceswap_id01691_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01717\00005_id02005_7_Egh9mW5y4_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01717\00005_id02005_7_Egh9mW5y4_faceswap_id04727_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01779\00010_id01691_IVtS5z8Jrrk_faceswap_id00173_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01779\00010_id01691_IVtS5z8Jrrk_faceswap_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01779\00010_id01691_IVtS5z8Jrrk_faceswap_id00701_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01779\00010_id01691_IVtS5z8Jrrk_faceswap_id01170_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01779\00010_id01691_IVtS5z8Jrrk_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01835\00130_id00761_QtTNFhCCgzw_faceswap_id00391_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01835\00130_id00761_QtTNFhCCgzw_faceswap_id00518_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01835\00130_id00761_QtTNFhCCgzw_faceswap_id01170_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01835\00130_id00761_QtTNFhCCgzw_faceswap_id02051_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01835\00130_id00761_QtTNFhCCgzw_faceswap_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01856\00109_3_id01207_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01856\00109_3_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01856\00109_3_id01530_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01856\00109_3_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01856\00109_3_id02051_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01920\00099_id00476_UgdYVJ6xPYg_faceswap_id00476_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01920\00099_id00476_UgdYVJ6xPYg_faceswap_id00944_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01920\00099_id00476_UgdYVJ6xPYg_faceswap_id01597_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01920\00099_id00476_UgdYVJ6xPYg_faceswap_id01779_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01920\00099_id00476_UgdYVJ6xPYg_faceswap_id02316_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01933\00028_3_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01933\00028_3_id00518_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01933\00028_3_id00987_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01933\00028_3_id01995_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01933\00028_3_id02494_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_2_id01171_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_2_id01179_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_2_id01207_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_2_id01597_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_2_id01717_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_3_id00366_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_3_id01392_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_3_id01544_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_3_id01779_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id01972\00078_3_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02005\00052_id02342_RJPBPhJB8TA_faceswap_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02005\00052_id02342_RJPBPhJB8TA_faceswap_id00518_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02005\00052_id02342_RJPBPhJB8TA_faceswap_id01207_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02005\00052_id02342_RJPBPhJB8TA_faceswap_id01544_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02005\00052_id02342_RJPBPhJB8TA_faceswap_id01717_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_0_id00761_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_0_id01076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_0_id01835_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_0_id02051_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_0_id02296_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_2_id00478_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_2_id01170_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_2_id01452_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_2_id01610_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02296\00019_2_id01920_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02342\00191_id02005_7_Egh9mW5y4_faceswap_id01076_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02342\00191_id02005_7_Egh9mW5y4_faceswap_id01598_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02342\00191_id02005_7_Egh9mW5y4_faceswap_id01835_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02342\00191_id02005_7_Egh9mW5y4_faceswap_id02316_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02342\00191_id02005_7_Egh9mW5y4_faceswap_id02342_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02494\00050_id00475_xQjvXRcnPvw_faceswap_id00475_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02494\00050_id00475_xQjvXRcnPvw_faceswap_id01995_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02494\00050_id00475_xQjvXRcnPvw_faceswap_id02005_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02494\00050_id00475_xQjvXRcnPvw_faceswap_id02296_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\men\id02494\00050_id00475_xQjvXRcnPvw_faceswap_id04727_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00220\00027_id02586_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00220\00027_id03569_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00220\00027_id03658_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00220\00027_id04376_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00220\00027_id05251_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00359\00053_id01838_q_lUk55OrL0_faceswap_id00568_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00359\00053_id01838_q_lUk55OrL0_faceswap_id03658_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00359\00053_id01838_q_lUk55OrL0_faceswap_id04736_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00359\00053_id01838_q_lUk55OrL0_faceswap_id05106_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00359\00053_id01838_q_lUk55OrL0_faceswap_id05252_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00460\00005_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00460\00005_id00460_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00460\00005_id01178_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00460\00005_id02721_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00460\00005_id02808_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00568\00384_id05252_CMxIX3absYM_faceswap_id00577_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00568\00384_id05252_CMxIX3absYM_faceswap_id00707_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00568\00384_id05252_CMxIX3absYM_faceswap_id01661_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00568\00384_id05252_CMxIX3absYM_faceswap_id03747_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00568\00384_id05252_CMxIX3absYM_faceswap_id05252_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00577\00010_id01532_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00577\00010_id01907_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00577\00010_id04055_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00577\00010_id04376_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00577\00010_id04736_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00592\00017_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00592\00017_id01661_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00592\00017_id01838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00592\00017_id04055_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00592\00017_id05252_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00707\00052_id04820_64ybrA1atlM_faceswap_id02824_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00707\00052_id04820_64ybrA1atlM_faceswap_id04376_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00707\00052_id04820_64ybrA1atlM_faceswap_id04547_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00707\00052_id04820_64ybrA1atlM_faceswap_id04689_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00707\00052_id04820_64ybrA1atlM_faceswap_id04820_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00829\00271_id01178_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00829\00271_id03103_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00829\00271_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00829\00271_id04736_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00829\00271_id05106_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00832\00078_id00371_t20i0HtPwW0_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00832\00078_id00371_t20i0HtPwW0_faceswap_id00832_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00832\00078_id00371_t20i0HtPwW0_faceswap_id01178_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00832\00078_id00371_t20i0HtPwW0_faceswap_id04055_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id00832\00078_id00371_t20i0HtPwW0_faceswap_id04540_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01178\00028_id05252_CMxIX3absYM_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01178\00028_id05252_CMxIX3absYM_faceswap_id01838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01178\00028_id05252_CMxIX3absYM_faceswap_id02071_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01178\00028_id05252_CMxIX3absYM_faceswap_id02721_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01178\00028_id05252_CMxIX3absYM_faceswap_id04437_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01532\00065_id00220_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01532\00065_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01532\00065_id02508_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01532\00065_id02824_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01532\00065_id05231_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01661\00059_id00577_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01661\00059_id00832_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01661\00059_id01178_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01661\00059_id02586_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01661\00059_id04055_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01783\00015_id02948_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01783\00015_id03569_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01783\00015_id03713_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01783\00015_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01783\00015_id05235_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01838\00126_id05235_ASy8lP3SRtw_faceswap_id00568_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01838\00126_id05235_ASy8lP3SRtw_faceswap_id00829_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01838\00126_id05235_ASy8lP3SRtw_faceswap_id01838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01838\00126_id05235_ASy8lP3SRtw_faceswap_id02071_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01838\00126_id05235_ASy8lP3SRtw_faceswap_id05106_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01907\00148_id05235_ASy8lP3SRtw_faceswap_id00371_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01907\00148_id05235_ASy8lP3SRtw_faceswap_id03656_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01907\00148_id05235_ASy8lP3SRtw_faceswap_id04437_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01907\00148_id05235_ASy8lP3SRtw_faceswap_id05251_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id01907\00148_id05235_ASy8lP3SRtw_faceswap_id05252_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02301\00092_id00829_aMEvVaUBq2Y_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02301\00092_id00829_aMEvVaUBq2Y_faceswap_id01838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02301\00092_id00829_aMEvVaUBq2Y_faceswap_id02508_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02301\00092_id00829_aMEvVaUBq2Y_faceswap_id04055_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02301\00092_id00829_aMEvVaUBq2Y_faceswap_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02508\00083_id03658_8Wtu9VXKqjY_faceswap_id01532_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02508\00083_id03658_8Wtu9VXKqjY_faceswap_id01661_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02508\00083_id03658_8Wtu9VXKqjY_faceswap_id04540_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02508\00083_id03658_8Wtu9VXKqjY_faceswap_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02508\00083_id03658_8Wtu9VXKqjY_faceswap_id05980_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02586\00042_id04939_i4v2cXo9HIQ_faceswap_id00460_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02586\00042_id04939_i4v2cXo9HIQ_faceswap_id04245_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02586\00042_id04939_i4v2cXo9HIQ_faceswap_id04374_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02586\00042_id04939_i4v2cXo9HIQ_faceswap_id04820_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02586\00042_id04939_i4v2cXo9HIQ_faceswap_id05106_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02617\00028_id00592_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02617\00028_id02838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02617\00028_id03713_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02617\00028_id04689_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02617\00028_id04736_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02721\00424_id03658_8Wtu9VXKqjY_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02721\00424_id03658_8Wtu9VXKqjY_faceswap_id02824_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02721\00424_id03658_8Wtu9VXKqjY_faceswap_id02838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02721\00424_id03658_8Wtu9VXKqjY_faceswap_id02948_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02721\00424_id03658_8Wtu9VXKqjY_faceswap_id04820_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02808\00056_id03103_wiCYm3THQPw_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02808\00056_id03103_wiCYm3THQPw_faceswap_id00832_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02808\00056_id03103_wiCYm3THQPw_faceswap_id02301_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02808\00056_id03103_wiCYm3THQPw_faceswap_id05235_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02808\00056_id03103_wiCYm3THQPw_faceswap_id05252_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02824\00130_id03747_wQOOhZvnrq4_faceswap_id01783_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02824\00130_id03747_wQOOhZvnrq4_faceswap_id02617_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02824\00130_id03747_wQOOhZvnrq4_faceswap_id04245_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02824\00130_id03747_wQOOhZvnrq4_faceswap_id05106_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02824\00130_id03747_wQOOhZvnrq4_faceswap_id05231_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02948\00298_id04820_64ybrA1atlM_faceswap_id00460_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02948\00298_id04820_64ybrA1atlM_faceswap_id01178_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02948\00298_id04820_64ybrA1atlM_faceswap_id02721_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02948\00298_id04820_64ybrA1atlM_faceswap_id04374_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id02948\00298_id04820_64ybrA1atlM_faceswap_id05251_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03569\00065_id00220_WlHLlTQKj8g_faceswap_id00220_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03569\00065_id00220_WlHLlTQKj8g_faceswap_id01178_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03569\00065_id00220_WlHLlTQKj8g_faceswap_id02586_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03569\00065_id00220_WlHLlTQKj8g_faceswap_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03569\00065_id00220_WlHLlTQKj8g_faceswap_id05252_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_1_id00359_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_1_id00460_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_1_id00592_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_1_id02721_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_1_id02838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_3_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_3_id00592_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_3_id01532_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_3_id02301_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03656\00052_3_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03658\00077_id00371_t20i0HtPwW0_faceswap_id00220_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03658\00077_id00371_t20i0HtPwW0_faceswap_id02301_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03658\00077_id00371_t20i0HtPwW0_faceswap_id03713_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03658\00077_id00371_t20i0HtPwW0_faceswap_id04245_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03658\00077_id00371_t20i0HtPwW0_faceswap_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03713\00249_id02617_4EZjRXC4fLk_faceswap_id00220_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03713\00249_id02617_4EZjRXC4fLk_faceswap_id00832_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03713\00249_id02617_4EZjRXC4fLk_faceswap_id01178_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03713\00249_id02617_4EZjRXC4fLk_faceswap_id01532_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03713\00249_id02617_4EZjRXC4fLk_faceswap_id04245_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03747\00273_id02824_glBy_mYcXZw_faceswap_id01661_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03747\00273_id02824_glBy_mYcXZw_faceswap_id04055_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03747\00273_id02824_glBy_mYcXZw_faceswap_id04374_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03747\00273_id02824_glBy_mYcXZw_faceswap_id04547_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id03747\00273_id02824_glBy_mYcXZw_faceswap_id05235_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04055\00001_id05252_CMxIX3absYM_faceswap_id00577_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04055\00001_id05252_CMxIX3absYM_faceswap_id03569_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04055\00001_id05252_CMxIX3absYM_faceswap_id04705_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04055\00001_id05252_CMxIX3absYM_faceswap_id05251_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04055\00001_id05252_CMxIX3absYM_faceswap_id05980_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04374\00032_id04689_0YqK1ksKjLg_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04374\00032_id04689_0YqK1ksKjLg_faceswap_id01532_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04374\00032_id04689_0YqK1ksKjLg_faceswap_id04689_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04374\00032_id04689_0YqK1ksKjLg_faceswap_id05231_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04376\00181_id04437_2csrqaF55pk_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04376\00181_id04437_2csrqaF55pk_faceswap_id00460_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04376\00181_id04437_2csrqaF55pk_faceswap_id00577_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04376\00181_id04437_2csrqaF55pk_faceswap_id01838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04376\00181_id04437_2csrqaF55pk_faceswap_id02721_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04547\00052_2_id00832_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04547\00052_2_id02617_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04547\00052_2_id02808_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04547\00052_2_id02824_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04547\00052_2_id05251_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04705\00408_id05252_CMxIX3absYM_faceswap_id00460_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04705\00408_id05252_CMxIX3absYM_faceswap_id01838_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04705\00408_id05252_CMxIX3absYM_faceswap_id02948_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04705\00408_id05252_CMxIX3absYM_faceswap_id03747_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04705\00408_id05252_CMxIX3absYM_faceswap_id04736_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04736\00083_id05235_ASy8lP3SRtw_faceswap_id00592_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04736\00083_id05235_ASy8lP3SRtw_faceswap_id01907_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04736\00083_id05235_ASy8lP3SRtw_faceswap_id02721_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04736\00083_id05235_ASy8lP3SRtw_faceswap_id04245_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04820\00015_id02948__ZEDGNWjuFE_faceswap_id00568_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04820\00015_id02948__ZEDGNWjuFE_faceswap_id01783_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04820\00015_id02948__ZEDGNWjuFE_faceswap_id02721_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04820\00015_id02948__ZEDGNWjuFE_faceswap_id04376_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04820\00015_id02948__ZEDGNWjuFE_faceswap_id04689_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04939\00174_id02586_dEYzYDsbAeo_faceswap_id00460_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04939\00174_id02586_dEYzYDsbAeo_faceswap_id01907_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04939\00174_id02586_dEYzYDsbAeo_faceswap_id03747_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04939\00174_id02586_dEYzYDsbAeo_faceswap_id04939_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id04939\00174_id02586_dEYzYDsbAeo_faceswap_id05235_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05106\00078_id04820_64ybrA1atlM_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05106\00078_id04820_64ybrA1atlM_faceswap_id00592_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05106\00078_id04820_64ybrA1atlM_faceswap_id01661_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05106\00078_id04820_64ybrA1atlM_faceswap_id04437_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05106\00078_id04820_64ybrA1atlM_faceswap_id05231_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05231\00149_id01178_6XpgYMiKxhc_faceswap_id00371_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05231\00149_id01178_6XpgYMiKxhc_faceswap_id00592_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05231\00149_id01178_6XpgYMiKxhc_faceswap_id01907_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05231\00149_id01178_6XpgYMiKxhc_faceswap_id02301_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05231\00149_id01178_6XpgYMiKxhc_faceswap_id02721_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05235\00052_id01907_LBcRkuRq0uY_faceswap_id01783_wavtolip.mp4,fake 
+FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05235\00052_id01907_LBcRkuRq0uY_faceswap_id02808_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05235\00052_id01907_LBcRkuRq0uY_faceswap_id04055_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05235\00052_id01907_LBcRkuRq0uY_faceswap_id04736_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05235\00052_id01907_LBcRkuRq0uY_faceswap_id05251_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05251\00033_id01178_6XpgYMiKxhc_faceswap_id00568_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05251\00033_id01178_6XpgYMiKxhc_faceswap_id03569_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05251\00033_id01178_6XpgYMiKxhc_faceswap_id03658_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05251\00033_id01178_6XpgYMiKxhc_faceswap_id04437_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05252\00052_id01178_6XpgYMiKxhc_faceswap_id00220_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05252\00052_id01178_6XpgYMiKxhc_faceswap_id00832_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05252\00052_id01178_6XpgYMiKxhc_faceswap_id02824_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05252\00052_id01178_6XpgYMiKxhc_faceswap_id04376_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\African\women\id05252\00052_id01178_6XpgYMiKxhc_faceswap_id04437_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00018\00181_id01201_Q8XWfmNiWYA_faceswap_id00243_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00018\00181_id01201_Q8XWfmNiWYA_faceswap_id00777_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00018\00181_id01201_Q8XWfmNiWYA_faceswap_id00945_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian 
(American)\men\id00018\00181_id01201_Q8XWfmNiWYA_faceswap_id01239_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00018\00181_id01201_Q8XWfmNiWYA_faceswap_id03678_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00020\00206_id01182_zca-PHR_U40_faceswap_id00018_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00020\00206_id01182_zca-PHR_U40_faceswap_id00049_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00020\00206_id01182_zca-PHR_U40_faceswap_id00696_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00020\00206_id01182_zca-PHR_U40_faceswap_id01048_wavtolip.mp4,fake +FakeAVCeleb\FakeVideo-FakeAudio\Caucasian (American)\men\id00020\00206_id01182_zca-PHR_U40_faceswap_id01201_wavtolip.mp4,fake diff --git a/datasets/train/.gitkeep b/datasets/train/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/datasets/train/demo.txt b/datasets/train/demo.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/datasets/val/.gitkeep b/datasets/val/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/datasets/val/demo.txt b/datasets/val/demo.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/images/demo.txt b/images/demo.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/images/fake_image.jpg b/images/fake_image.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2fbf4ddf24e608c55b5416b4fea4ee423445edd9 Binary files /dev/null and b/images/fake_image.jpg differ diff --git a/images/lady.jpg b/images/lady.jpg new file mode 100644 index 
import os
import cv2
import torch
import argparse
import numpy as np
import torch.nn as nn
from models.TMC import ETMC
from models import image

# Set random seed for reproducibility.
torch.manual_seed(42)


# Constructor arguments for the RawNet audio encoder (consumed by get_args
# below, which turns each key into a CLI flag of the matching type).
audio_args = {
    'nb_samp': 64600,
    'first_conv': 1024,
    'in_channels': 1,
    'filts': [20, [20, 20], [20, 128], [128, 128]],
    'blocks': [2, 4],
    'nb_fc_node': 1024,
    'gru_node': 1024,
    'nb_gru_layer': 3,
}


def get_args(parser):
    """Register all model/data CLI options on *parser* (mutated in place).

    NOTE(review): argparse `type=bool` flags below are kept for interface
    compatibility, but beware that bool("False") is True -- any non-empty
    string passed on the command line enables them.
    """
    parser.add_argument("--batch_size", type=int, default=8)
    parser.add_argument("--data_dir", type=str, default="datasets/train/fakeavceleb*")
    parser.add_argument("--LOAD_SIZE", type=int, default=256)
    parser.add_argument("--FINE_SIZE", type=int, default=224)
    parser.add_argument("--dropout", type=float, default=0.2)
    parser.add_argument("--gradient_accumulation_steps", type=int, default=1)
    parser.add_argument("--hidden", nargs="*", type=int, default=[])
    parser.add_argument("--hidden_sz", type=int, default=768)
    parser.add_argument("--img_embed_pool_type", type=str, default="avg", choices=["max", "avg"])
    parser.add_argument("--img_hidden_sz", type=int, default=1024)
    parser.add_argument("--include_bn", type=int, default=True)
    parser.add_argument("--lr", type=float, default=1e-4)
    parser.add_argument("--lr_factor", type=float, default=0.3)
    parser.add_argument("--lr_patience", type=int, default=10)
    parser.add_argument("--max_epochs", type=int, default=500)
    parser.add_argument("--n_workers", type=int, default=12)
    parser.add_argument("--name", type=str, default="MMDF")
    parser.add_argument("--num_image_embeds", type=int, default=1)
    parser.add_argument("--patience", type=int, default=20)
    parser.add_argument("--savedir", type=str, default="./savepath/")
    parser.add_argument("--seed", type=int, default=1)
    parser.add_argument("--n_classes", type=int, default=2)
    parser.add_argument("--annealing_epoch", type=int, default=10)
    parser.add_argument("--device", type=str, default='cpu')
    parser.add_argument("--pretrained_image_encoder", type=bool, default=False)
    parser.add_argument("--freeze_image_encoder", type=bool, default=False)
    parser.add_argument("--pretrained_audio_encoder", type=bool, default=False)
    parser.add_argument("--freeze_audio_encoder", type=bool, default=False)
    parser.add_argument("--augment_dataset", type=bool, default=True)

    # Expose every RawNet constructor argument as its own CLI flag.
    for key, value in audio_args.items():
        parser.add_argument(f"--{key}", type=type(value), default=value)


# All three loaders read the same multimodal checkpoint; each sub-model picks
# up the subset of weights matching its own parameter names (strict=False).
_CKPT_PATH = 'checkpoints/model_best.pt'


def _load_checkpoint():
    """Load the shared checkpoint onto the CPU."""
    return torch.load(_CKPT_PATH, map_location=torch.device('cpu'))


def model_summary(args):
    '''Print every named module of the multimodal model (debugging aid).'''
    model = ETMC(args)
    for name, layer in model.named_modules():
        print(name, layer)


def load_multimodal_model(args):
    '''Load the fused multimodal classifier in eval mode.'''
    model = ETMC(args)
    model.load_state_dict(_load_checkpoint(), strict=False)
    model.eval()
    return model


def load_img_modality_model(args):
    '''Load the image-modality encoder in eval mode.'''
    rgb_encoder = image.ImageEncoder(args)
    rgb_encoder.load_state_dict(_load_checkpoint(), strict=False)
    rgb_encoder.eval()
    return rgb_encoder


def load_spec_modality_model(args):
    '''Load the audio (spectral) modality encoder in eval mode.'''
    spec_encoder = image.RawNet(args)
    spec_encoder.load_state_dict(_load_checkpoint(), strict=False)
    spec_encoder.eval()
    return spec_encoder


# Build args and load all models once, at import time.
parser = argparse.ArgumentParser(description="Train Models")
get_args(parser)
args, remaining_args = parser.parse_known_args()
assert remaining_args == [], remaining_args

multimodal = load_multimodal_model(args)
spec_model = load_spec_modality_model(args)
img_model = load_img_modality_model(args)


def preprocess_img(face):
    """Convert an HxWx3 image (uint8 range) to a 1x3x256x256 float tensor in [0, 1]."""
    face = face / 255
    face = cv2.resize(face, (256, 256))
    face = face.transpose(2, 0, 1)  # (H, W, C) -> (C, H, W)
    face_pt = torch.unsqueeze(torch.Tensor(face), dim=0)
    return face_pt


def preprocess_audio(audio_file):
    """Wrap a 1-D sample array into a (1, n_samples) float tensor."""
    audio_pt = torch.unsqueeze(torch.Tensor(audio_file), dim=0)
    return audio_pt


def deepfakes_spec_predict(input_audio):
    """Classify an audio clip as REAL or FAKE and return a verdict string.

    *input_audio* is a 2-tuple whose first element is the raw sample array
    (the second element -- presumably the sample rate -- is ignored).
    """
    x, _ = input_audio
    audio = preprocess_audio(x)
    spec_grads = spec_model.forward(audio)
    multimodal_grads = multimodal.spec_depth[0].forward(spec_grads)

    # BUG FIX: the previous code reduced the softmax output with np.argmax of
    # a single selected value, which is always 0, so the comparison
    # `max_value > 0.5` was always False and every clip was reported FAKE.
    # Decide by the argmax class instead.
    probs = torch.softmax(multimodal_grads, dim=-1).squeeze()
    pred_idx = int(torch.argmax(probs, dim=-1))

    # NOTE(review): assumes class index 0 == "real" in the classifier head --
    # confirm against the training label encoding.
    if pred_idx == 0:
        text2 = "The audio is REAL."
    else:
        text2 = "The audio is FAKE."
    return text2


def deepfakes_image_predict(input_image):
    """Classify a single RGB image as REAL or FAKE and return a verdict string."""
    face = preprocess_img(input_image)

    img_grads = img_model.forward(face)
    multimodal_grads = multimodal.clf_rgb[0].forward(img_grads)

    # BUG FIX: same always-FAKE reduction bug as deepfakes_spec_predict;
    # decide by the argmax class of the softmax output.
    probs = torch.softmax(multimodal_grads, dim=-1).squeeze()
    pred_idx = int(torch.argmax(probs, dim=-1))

    # NOTE(review): assumes class index 0 == "real" -- confirm.
    if pred_idx == 0:
        text2 = "The image is REAL."
    else:
        text2 = "The image is FAKE."
    return text2


def preprocess_video(input_video, n_frames=5):
    """Sample *n_frames* evenly spaced frames from a video file.

    Returns a list of preprocessed 1x3x256x256 RGB tensors.  Pass
    n_frames=None to sample every frame.
    """
    v_cap = cv2.VideoCapture(input_video)
    v_len = int(v_cap.get(cv2.CAP_PROP_FRAME_COUNT))

    # Pick 'n_frames' evenly spaced frame indices to sample.
    if n_frames is None:
        sample = np.arange(0, v_len)
    else:
        sample = np.linspace(0, v_len - 1, n_frames).astype(int)

    frames = []
    for j in range(v_len):
        success = v_cap.grab()
        if j in sample:
            # Decode only the frames we actually keep.
            success, frame = v_cap.retrieve()
            if not success:
                continue
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            frames.append(preprocess_img(frame))
    v_cap.release()
    return frames


def deepfakes_video_predict(input_video):
    '''Classify a video as REAL or FAKE by averaging per-frame class probabilities.'''
    video_frames = preprocess_video(input_video)

    real_scores = []
    fake_scores = []

    for face in video_frames:
        img_grads = img_model.forward(face)
        multimodal_grads = multimodal.clf_rgb[0].forward(img_grads)

        probs = torch.softmax(multimodal_grads, dim=-1).squeeze()
        # BUG FIX: both accumulators previously received the identical tensor,
        # so `real_grads_mean > fake_grads_mean` could never be True and every
        # video was reported FAKE.  Track the two class probabilities
        # separately.  NOTE(review): assumes index 0 == "real" -- confirm.
        real_scores.append(float(probs[0]))
        fake_scores.append(float(probs[1]))

    real_mean = np.mean(real_scores)
    fake_mean = np.mean(fake_scores)

    if real_mean > fake_mean:
        text = "The video is REAL."
    else:
        text = "The video is FAKE."
    return text
+ return text + diff --git a/inference_2.py b/inference_2.py new file mode 100644 index 0000000000000000000000000000000000000000..db28055fce12a33e092d301ba9f92eab7900726e --- /dev/null +++ b/inference_2.py @@ -0,0 +1,265 @@ +import os +import cv2 +import onnx +import torch +import argparse +import numpy as np +import torch.nn as nn +from models.TMC import ETMC +from models import image + +from onnx2pytorch import ConvertModel + +onnx_model = onnx.load('checkpoints/efficientnet.onnx') +pytorch_model = ConvertModel(onnx_model) + +#Set random seed for reproducibility. +torch.manual_seed(42) + + +# Define the audio_args dictionary +audio_args = { + 'nb_samp': 64600, + 'first_conv': 1024, + 'in_channels': 1, + 'filts': [20, [20, 20], [20, 128], [128, 128]], + 'blocks': [2, 4], + 'nb_fc_node': 1024, + 'gru_node': 1024, + 'nb_gru_layer': 3, + 'nb_classes': 2 +} + +import torch +from torchvision import transforms +from PIL import Image +from timm import create_model +import os +import numpy as np + +# Constants +MODEL_PATH = r"models\ai_detector\pytorch_model.pth" +IMG_SIZE = 380 +DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu") +LABEL_MAPPING = {0: "AI-generated", 1: "Human-created"} + +# Load model from local file +model = create_model('efficientnet_b4', pretrained=False, num_classes=2) +state_dict = torch.load(MODEL_PATH, map_location=DEVICE) +model.load_state_dict(state_dict) +model.to(DEVICE).eval() + +# Define preprocessing transform +transform = transforms.Compose([ + transforms.Resize(IMG_SIZE + 20), + transforms.CenterCrop(IMG_SIZE), + transforms.ToTensor(), + transforms.Normalize(mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225]), +]) + +def detect_ai_generated_image(img): + # Handle file path or numpy input + if isinstance(img, str) and os.path.isfile(img): + img = Image.open(img).convert("RGB") + elif isinstance(img, np.ndarray): + img = Image.fromarray(img.astype('uint8'), 'RGB') + elif isinstance(img, Image.Image): + img = 
img.convert("RGB") + else: + raise ValueError("Invalid image input") + + input_tensor = transform(img).unsqueeze(0).to(DEVICE) + + with torch.no_grad(): + output = model(input_tensor) + probs = torch.nn.functional.softmax(output, dim=1) + pred_class = probs.argmax().item() + confidence = probs[0, pred_class].item() + + return f"{LABEL_MAPPING[pred_class]} (confidence: {confidence:.2%})" + + +def get_args(parser): + parser.add_argument("--batch_size", type=int, default=8) + parser.add_argument("--data_dir", type=str, default="datasets/train/fakeavceleb*") + parser.add_argument("--LOAD_SIZE", type=int, default=256) + parser.add_argument("--FINE_SIZE", type=int, default=224) + parser.add_argument("--dropout", type=float, default=0.2) + parser.add_argument("--gradient_accumulation_steps", type=int, default=1) + parser.add_argument("--hidden", nargs="*", type=int, default=[]) + parser.add_argument("--hidden_sz", type=int, default=768) + parser.add_argument("--img_embed_pool_type", type=str, default="avg", choices=["max", "avg"]) + parser.add_argument("--img_hidden_sz", type=int, default=1024) + parser.add_argument("--include_bn", type=int, default=True) + parser.add_argument("--lr", type=float, default=1e-4) + parser.add_argument("--lr_factor", type=float, default=0.3) + parser.add_argument("--lr_patience", type=int, default=10) + parser.add_argument("--max_epochs", type=int, default=500) + parser.add_argument("--n_workers", type=int, default=12) + parser.add_argument("--name", type=str, default="MMDF") + parser.add_argument("--num_image_embeds", type=int, default=1) + parser.add_argument("--patience", type=int, default=20) + parser.add_argument("--savedir", type=str, default="./savepath/") + parser.add_argument("--seed", type=int, default=1) + parser.add_argument("--n_classes", type=int, default=2) + parser.add_argument("--annealing_epoch", type=int, default=10) + parser.add_argument("--device", type=str, default='cpu') + 
parser.add_argument("--pretrained_image_encoder", type=bool, default = False) + parser.add_argument("--freeze_image_encoder", type=bool, default = False) + parser.add_argument("--pretrained_audio_encoder", type = bool, default=False) + parser.add_argument("--freeze_audio_encoder", type = bool, default = False) + parser.add_argument("--augment_dataset", type = bool, default = True) + + for key, value in audio_args.items(): + parser.add_argument(f"--{key}", type=type(value), default=value) + +def model_summary(args): + '''Prints the model summary.''' + model = ETMC(args) + + for name, layer in model.named_modules(): + print(name, layer) + +def load_multimodal_model(args): + '''Load multimodal model''' + model = ETMC(args) + ckpt = torch.load('checkpoints/model.pth', map_location = torch.device('cpu')) + model.load_state_dict(ckpt, strict = True) + model.eval() + return model + +def load_img_modality_model(args): + '''Loads image modality model.''' + rgb_encoder = pytorch_model + + ckpt = torch.load('checkpoints/model.pth', map_location = torch.device('cpu')) + rgb_encoder.load_state_dict(ckpt['rgb_encoder'], strict = True) + rgb_encoder.eval() + return rgb_encoder + +def load_spec_modality_model(args): + spec_encoder = image.RawNet(args) + ckpt = torch.load('checkpoints/model.pth', map_location = torch.device('cpu')) + spec_encoder.load_state_dict(ckpt['spec_encoder'], strict = True) + spec_encoder.eval() + return spec_encoder + + +#Load models. 
+parser = argparse.ArgumentParser(description="Inference models") +get_args(parser) +args, remaining_args = parser.parse_known_args() +assert remaining_args == [], remaining_args + +spec_model = load_spec_modality_model(args) + +img_model = load_img_modality_model(args) + + +def preprocess_img(face): + face = face / 255 + face = cv2.resize(face, (256, 256)) + # face = face.transpose(2, 0, 1) #(W, H, C) -> (C, W, H) + face_pt = torch.unsqueeze(torch.Tensor(face), dim = 0) + return face_pt + +def preprocess_audio(audio_file): + audio_pt = torch.unsqueeze(torch.Tensor(audio_file), dim = 0) + return audio_pt + +def deepfakes_spec_predict(input_audio): + x, _ = input_audio + audio = preprocess_audio(x) + spec_grads = spec_model.forward(audio) + spec_grads_inv = np.exp(spec_grads.cpu().detach().numpy().squeeze()) + + # multimodal_grads = multimodal.spec_depth[0].forward(spec_grads) + + # out = nn.Softmax()(multimodal_grads) + # max = torch.argmax(out, dim = -1) #Index of the max value in the tensor. + # max_value = out[max] #Actual value of the tensor. + max_value = np.argmax(spec_grads_inv) + + if max_value > 0.5: + preds = round(100 - (max_value*100), 3) + text2 = f"The audio is REAL." + + else: + preds = round(max_value*100, 3) + text2 = f"The audio is FAKE." + + return text2 + +def deepfakes_image_predict(input_image): + face = preprocess_img(input_image) + print(f"Face shape is: {face.shape}") + img_grads = img_model.forward(face) + img_grads = img_grads.cpu().detach().numpy() + img_grads_np = np.squeeze(img_grads) + + if img_grads_np[0] > 0.5: + preds = round(img_grads_np[0] * 100, 3) + text2 = f"The image is REAL. \nConfidence score is: {preds}" + + else: + preds = round(img_grads_np[1] * 100, 3) + text2 = f"The image is FAKE. 
\nConfidence score is: {preds}" + + return text2 + + +def preprocess_video(input_video, n_frames = 3): + v_cap = cv2.VideoCapture(input_video) + v_len = int(v_cap.get(cv2.CAP_PROP_FRAME_COUNT)) + + # Pick 'n_frames' evenly spaced frames to sample + if n_frames is None: + sample = np.arange(0, v_len) + else: + sample = np.linspace(0, v_len - 1, n_frames).astype(int) + + #Loop through frames. + frames = [] + for j in range(v_len): + success = v_cap.grab() + if j in sample: + # Load frame + success, frame = v_cap.retrieve() + if not success: + continue + frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + frame = preprocess_img(frame) + frames.append(frame) + v_cap.release() + return frames + + +def deepfakes_video_predict(input_video): + '''Perform inference on a video.''' + video_frames = preprocess_video(input_video) + real_faces_list = [] + fake_faces_list = [] + + for face in video_frames: + # face = preprocess_img(face) + + img_grads = img_model.forward(face) + img_grads = img_grads.cpu().detach().numpy() + img_grads_np = np.squeeze(img_grads) + real_faces_list.append(img_grads_np[0]) + fake_faces_list.append(img_grads_np[1]) + + real_faces_mean = np.mean(real_faces_list) + fake_faces_mean = np.mean(fake_faces_list) + + if real_faces_mean > 0.5: + preds = round(real_faces_mean * 100, 3) + text2 = f"The video is REAL. \nConfidence score is: {preds}%" + + else: + preds = round(fake_faces_mean * 100, 3) + text2 = f"The video is FAKE. 
\nConfidence score is: {preds}%" + + return text2 + diff --git a/inference_3.py b/inference_3.py new file mode 100644 index 0000000000000000000000000000000000000000..7dddf4e5d5a78af741540ec3218c5abafe6a976b --- /dev/null +++ b/inference_3.py @@ -0,0 +1,17 @@ +# inference_2.py + +from PIL import Image +import numpy as np + +def detect_ai_generated_image(img): + # if img is a path, load as array + if isinstance(img, str): + img = np.array(Image.open(img).convert("RGB")) + + # 🧠 PLACEHOLDER: fake logic + # Replace with actual AI detection logic or model + mean_pixel = img.mean() + if mean_pixel > 120: + return "Possibly AI-generated" + else: + return "Likely Real" diff --git a/main.py b/main.py new file mode 100644 index 0000000000000000000000000000000000000000..ccb42fe8925fb550abe7dd681d39b66b47e58c21 --- /dev/null +++ b/main.py @@ -0,0 +1,247 @@ +import os +import argparse +from tqdm import tqdm +import torch.nn as nn +import tensorflow as tf +import torch.optim as optim + +from models.TMC import ETMC, ce_loss +import torchvision.transforms as transforms +from data.dfdt_dataset import FakeAVCelebDatasetTrain, FakeAVCelebDatasetVal + + +from utils.utils import * +from utils.logger import create_logger +from sklearn.metrics import accuracy_score +from torch.utils.tensorboard import SummaryWriter + +# Define the audio_args dictionary +audio_args = { + 'nb_samp': 64600, + 'first_conv': 1024, + 'in_channels': 1, + 'filts': [20, [20, 20], [20, 128], [128, 128]], + 'blocks': [2, 4], + 'nb_fc_node': 1024, + 'gru_node': 1024, + 'nb_gru_layer': 3, +} + + +def get_args(parser): + parser.add_argument("--batch_size", type=int, default=8) + parser.add_argument("--data_dir", type=str, default="datasets/train/fakeavceleb*") + parser.add_argument("--LOAD_SIZE", type=int, default=256) + parser.add_argument("--FINE_SIZE", type=int, default=224) + parser.add_argument("--dropout", type=float, default=0.2) + parser.add_argument("--gradient_accumulation_steps", type=int, default=1) + 
parser.add_argument("--hidden", nargs="*", type=int, default=[]) + parser.add_argument("--hidden_sz", type=int, default=768) + parser.add_argument("--img_embed_pool_type", type=str, default="avg", choices=["max", "avg"]) + parser.add_argument("--img_hidden_sz", type=int, default=1024) + parser.add_argument("--include_bn", type=int, default=True) + parser.add_argument("--lr", type=float, default=1e-4) + parser.add_argument("--lr_factor", type=float, default=0.3) + parser.add_argument("--lr_patience", type=int, default=10) + parser.add_argument("--max_epochs", type=int, default=500) + parser.add_argument("--n_workers", type=int, default=12) + parser.add_argument("--name", type=str, default="MMDF") + parser.add_argument("--num_image_embeds", type=int, default=1) + parser.add_argument("--patience", type=int, default=20) + parser.add_argument("--savedir", type=str, default="./savepath/") + parser.add_argument("--seed", type=int, default=1) + parser.add_argument("--n_classes", type=int, default=2) + parser.add_argument("--annealing_epoch", type=int, default=10) + parser.add_argument("--device", type=str, default='cpu') + parser.add_argument("--pretrained_image_encoder", type=bool, default = False) + parser.add_argument("--freeze_image_encoder", type=bool, default = True) + parser.add_argument("--pretrained_audio_encoder", type = bool, default=False) + parser.add_argument("--freeze_audio_encoder", type = bool, default = True) + parser.add_argument("--augment_dataset", type = bool, default = True) + + for key, value in audio_args.items(): + parser.add_argument(f"--{key}", type=type(value), default=value) + +def get_optimizer(model, args): + optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=1e-5) + return optimizer + + +def get_scheduler(optimizer, args): + return optim.lr_scheduler.ReduceLROnPlateau( + optimizer, "max", patience=args.lr_patience, verbose=True, factor=args.lr_factor + ) + +def model_forward(i_epoch, model, args, ce_loss, batch): + rgb, 
spec, tgt = batch['video_reshaped'], batch['spectrogram'], batch['label_map'] + rgb_pt = torch.Tensor(rgb.numpy()) + spec = spec.numpy() + spec_pt = torch.Tensor(spec) + tgt_pt = torch.Tensor(tgt.numpy()) + + if torch.cuda.is_available(): + rgb_pt, spec_pt, tgt_pt = rgb_pt.cuda(), spec_pt.cuda(), tgt_pt.cuda() + + # depth_alpha, rgb_alpha, depth_rgb_alpha = model(rgb_pt, spec_pt) + + # loss = ce_loss(tgt_pt, depth_alpha, args.n_classes, i_epoch, args.annealing_epoch) + \ + # ce_loss(tgt_pt, rgb_alpha, args.n_classes, i_epoch, args.annealing_epoch) + \ + # ce_loss(tgt_pt, depth_rgb_alpha, args.n_classes, i_epoch, args.annealing_epoch) + # return loss, depth_alpha, rgb_alpha, depth_rgb_alpha, tgt_pt + + depth_alpha, rgb_alpha, pseudo_alpha, depth_rgb_alpha = model(rgb_pt, spec_pt) + + loss = ce_loss(tgt_pt, depth_alpha, args.n_classes, i_epoch, args.annealing_epoch) + \ + ce_loss(tgt_pt, rgb_alpha, args.n_classes, i_epoch, args.annealing_epoch) + \ + ce_loss(tgt_pt, pseudo_alpha, args.n_classes, i_epoch, args.annealing_epoch) + \ + ce_loss(tgt_pt, depth_rgb_alpha, args.n_classes, i_epoch, args.annealing_epoch) + return loss, depth_alpha, rgb_alpha, depth_rgb_alpha, tgt_pt + + + +def model_eval(i_epoch, data, model, args, criterion): + model.eval() + with torch.no_grad(): + losses, depth_preds, rgb_preds, depthrgb_preds, tgts = [], [], [], [], [] + for batch in tqdm(data): + loss, depth_alpha, rgb_alpha, depth_rgb_alpha, tgt = model_forward(i_epoch, model, args, criterion, batch) + losses.append(loss.item()) + + depth_pred = depth_alpha.argmax(dim=1).cpu().detach().numpy() + rgb_pred = rgb_alpha.argmax(dim=1).cpu().detach().numpy() + depth_rgb_pred = depth_rgb_alpha.argmax(dim=1).cpu().detach().numpy() + + depth_preds.append(depth_pred) + rgb_preds.append(rgb_pred) + depthrgb_preds.append(depth_rgb_pred) + tgt = tgt.cpu().detach().numpy() + tgts.append(tgt) + + metrics = {"loss": np.mean(losses)} + print(f"Mean loss is: {metrics['loss']}") + + tgts = [l for sl in tgts 
for l in sl] + depth_preds = [l for sl in depth_preds for l in sl] + rgb_preds = [l for sl in rgb_preds for l in sl] + depthrgb_preds = [l for sl in depthrgb_preds for l in sl] + metrics["spec_acc"] = accuracy_score(tgts, depth_preds) + metrics["rgb_acc"] = accuracy_score(tgts, rgb_preds) + metrics["specrgb_acc"] = accuracy_score(tgts, depthrgb_preds) + return metrics + +def write_weight_histograms(writer, step, model): + for idx, item in enumerate(model.named_parameters()): + name = item[0] + weights = item[1].data + if weights.size(dim = 0) > 2: + try: + writer.add_histogram(name, weights, idx) + except ValueError as e: + continue + +writer = SummaryWriter() + +def train(args): + set_seed(args.seed) + args.savedir = os.path.join(args.savedir, args.name) + os.makedirs(args.savedir, exist_ok=True) + + train_ds = FakeAVCelebDatasetTrain(args) + train_ds = train_ds.load_features_from_tfrec() + + val_ds = FakeAVCelebDatasetVal(args) + val_ds = val_ds.load_features_from_tfrec() + + model = ETMC(args) + optimizer = get_optimizer(model, args) + scheduler = get_scheduler(optimizer, args) + logger = create_logger("%s/logfile.log" % args.savedir, args) + if torch.cuda.is_available(): + model.cuda() + + torch.save(args, os.path.join(args.savedir, "checkpoint.pt")) + start_epoch, global_step, n_no_improve, best_metric = 0, 0, 0, -np.inf + + for i_epoch in range(start_epoch, args.max_epochs): + train_losses = [] + model.train() + optimizer.zero_grad() + + for index, batch in tqdm(enumerate(train_ds)): + loss, depth_out, rgb_out, depthrgb, tgt = model_forward(i_epoch, model, args, ce_loss, batch) + if args.gradient_accumulation_steps > 1: + loss = loss / args.gradient_accumulation_steps + + train_losses.append(loss.item()) + loss.backward() + global_step += 1 + if global_step % args.gradient_accumulation_steps == 0: + optimizer.step() + optimizer.zero_grad() + + #Write weight histograms to Tensorboard. 
+ write_weight_histograms(writer, i_epoch, model) + + model.eval() + metrics = model_eval( + np.inf, val_ds, model, args, ce_loss + ) + logger.info("Train Loss: {:.4f}".format(np.mean(train_losses))) + log_metrics("val", metrics, logger) + logger.info( + "{}: Loss: {:.5f} | spec_acc: {:.5f}, rgb_acc: {:.5f}, depth rgb acc: {:.5f}".format( + "val", metrics["loss"], metrics["spec_acc"], metrics["rgb_acc"], metrics["specrgb_acc"] + ) + ) + tuning_metric = metrics["specrgb_acc"] + + scheduler.step(tuning_metric) + is_improvement = tuning_metric > best_metric + if is_improvement: + best_metric = tuning_metric + n_no_improve = 0 + else: + n_no_improve += 1 + + save_checkpoint( + { + "epoch": i_epoch + 1, + "optimizer": optimizer.state_dict(), + "scheduler": scheduler.state_dict(), + "n_no_improve": n_no_improve, + "best_metric": best_metric, + }, + is_improvement, + args.savedir, + ) + + if n_no_improve >= args.patience: + logger.info("No improvement. Breaking out of loop.") + break + writer.close() + # load_checkpoint(model, os.path.join(args.savedir, "model_best.pt")) + model.eval() + test_metrics = model_eval( + np.inf, val_ds, model, args, ce_loss + ) + logger.info( + "{}: Loss: {:.5f} | spec_acc: {:.5f}, rgb_acc: {:.5f}, depth rgb acc: {:.5f}".format( + "Test", test_metrics["loss"], test_metrics["spec_acc"], test_metrics["rgb_acc"], + test_metrics["depthrgb_acc"] + ) + ) + log_metrics(f"Test", test_metrics, logger) + + +def cli_main(): + parser = argparse.ArgumentParser(description="Train Models") + get_args(parser) + args, remaining_args = parser.parse_known_args() + assert remaining_args == [], remaining_args + train(args) + + +if __name__ == "__main__": + import warnings + warnings.filterwarnings("ignore") + cli_main() diff --git a/model.py b/model.py new file mode 100644 index 0000000000000000000000000000000000000000..5d412a3608ddeff908b17e166d531d5d1fe09025 --- /dev/null +++ b/model.py @@ -0,0 +1,3 @@ +from torchvision.models import efficientnet_v2_s, 
EfficientNet_V2_S_Weights + +model = efficientnet_v2_s(weights=EfficientNet_V2_S_Weights.IMAGENET1K_V1) diff --git a/models/TMC.py b/models/TMC.py new file mode 100644 index 0000000000000000000000000000000000000000..09ea4821f7181900aff7d8af18bf0e1323b4e7e2 --- /dev/null +++ b/models/TMC.py @@ -0,0 +1,156 @@ +import torch +import torch.nn as nn +from models import image +import torch.nn.functional as F + + +# loss function +def KL(alpha, c): + if torch.cuda.is_available(): + beta = torch.ones((1, c)).cuda() + else: + beta = torch.ones((1, c)) + S_alpha = torch.sum(alpha, dim=1, keepdim=True) + S_beta = torch.sum(beta, dim=1, keepdim=True) + lnB = torch.lgamma(S_alpha) - torch.sum(torch.lgamma(alpha), dim=1, keepdim=True) + lnB_uni = torch.sum(torch.lgamma(beta), dim=1, keepdim=True) - torch.lgamma(S_beta) + dg0 = torch.digamma(S_alpha) + dg1 = torch.digamma(alpha) + kl = torch.sum((alpha - beta) * (dg1 - dg0), dim=1, keepdim=True) + lnB + lnB_uni + return kl + +def ce_loss(p, alpha, c, global_step, annealing_step): + S = torch.sum(alpha, dim=1, keepdim=True) + E = alpha - 1 + label = p + A = torch.sum(label * (torch.digamma(S) - torch.digamma(alpha)), dim=1, keepdim=True) + + annealing_coef = min(1, global_step / annealing_step) + alp = E * (1 - label) + 1 + B = annealing_coef * KL(alp, c) + return torch.mean((A + B)) + + +class TMC(nn.Module): + def __init__(self, args): + super(TMC, self).__init__() + self.args = args + self.rgbenc = image.ImageEncoder(args) + self.specenc = image.RawNet(args) + + spec_last_size = args.img_hidden_sz * 1 + rgb_last_size = args.img_hidden_sz * args.num_image_embeds + self.spec_depth = nn.ModuleList() + self.clf_rgb = nn.ModuleList() + + for hidden in args.hidden: + self.spec_depth.append(nn.Linear(spec_last_size, hidden)) + self.spec_depth.append(nn.ReLU()) + self.spec_depth.append(nn.Dropout(args.dropout)) + spec_last_size = hidden + self.spec_depth.append(nn.Linear(spec_last_size, args.n_classes)) + + for hidden in args.hidden: + 
self.clf_rgb.append(nn.Linear(rgb_last_size, hidden)) + self.clf_rgb.append(nn.ReLU()) + self.clf_rgb.append(nn.Dropout(args.dropout)) + rgb_last_size = hidden + self.clf_rgb.append(nn.Linear(rgb_last_size, args.n_classes)) + + def DS_Combin_two(self, alpha1, alpha2): + # Calculate the merger of two DS evidences + alpha = dict() + alpha[0], alpha[1] = alpha1, alpha2 + b, S, E, u = dict(), dict(), dict(), dict() + for v in range(2): + S[v] = torch.sum(alpha[v], dim=1, keepdim=True) + E[v] = alpha[v] - 1 + b[v] = E[v] / (S[v].expand(E[v].shape)) + u[v] = self.args.n_classes / S[v] + + # b^0 @ b^(0+1) + bb = torch.bmm(b[0].view(-1, self.args.n_classes, 1), b[1].view(-1, 1, self.args.n_classes)) + # b^0 * u^1 + uv1_expand = u[1].expand(b[0].shape) + bu = torch.mul(b[0], uv1_expand) + # b^1 * u^0 + uv_expand = u[0].expand(b[0].shape) + ub = torch.mul(b[1], uv_expand) + # calculate K + bb_sum = torch.sum(bb, dim=(1, 2), out=None) + bb_diag = torch.diagonal(bb, dim1=-2, dim2=-1).sum(-1) + # bb_diag1 = torch.diag(torch.mm(b[v], torch.transpose(b[v+1], 0, 1))) + K = bb_sum - bb_diag + + # calculate b^a + b_a = (torch.mul(b[0], b[1]) + bu + ub) / ((1 - K).view(-1, 1).expand(b[0].shape)) + # calculate u^a + u_a = torch.mul(u[0], u[1]) / ((1 - K).view(-1, 1).expand(u[0].shape)) + # test = torch.sum(b_a, dim = 1, keepdim = True) + u_a #Verify programming errors + + # calculate new S + S_a = self.args.n_classes / u_a + # calculate new e_k + e_a = torch.mul(b_a, S_a.expand(b_a.shape)) + alpha_a = e_a + 1 + return alpha_a + + def forward(self, rgb, spec): + spec = self.specenc(spec) + spec = torch.flatten(spec, start_dim=1) + + rgb = self.rgbenc(rgb) + rgb = torch.flatten(rgb, start_dim=1) + + spec_out = spec + + for layer in self.spec_depth: + spec_out = layer(spec_out) + + rgb_out = rgb + + for layer in self.clf_rgb: + rgb_out = layer(rgb_out) + + spec_evidence, rgb_evidence = F.softplus(spec_out), F.softplus(rgb_out) + spec_alpha, rgb_alpha = spec_evidence+1, rgb_evidence+1 + 
spec_rgb_alpha = self.DS_Combin_two(spec_alpha, rgb_alpha) + return spec_alpha, rgb_alpha, spec_rgb_alpha + + +class ETMC(TMC): + def __init__(self, args): + super(ETMC, self).__init__(args) + last_size = args.img_hidden_sz * args.num_image_embeds + args.img_hidden_sz * args.num_image_embeds + self.clf = nn.ModuleList() + for hidden in args.hidden: + self.clf.append(nn.Linear(last_size, hidden)) + self.clf.append(nn.ReLU()) + self.clf.append(nn.Dropout(args.dropout)) + last_size = hidden + self.clf.append(nn.Linear(last_size, args.n_classes)) + + def forward(self, rgb, spec): + spec = self.specenc(spec) + spec = torch.flatten(spec, start_dim=1) + + rgb = self.rgbenc(rgb) + rgb = torch.flatten(rgb, start_dim=1) + + spec_out = spec + for layer in self.spec_depth: + spec_out = layer(spec_out) + + rgb_out = rgb + for layer in self.clf_rgb: + rgb_out = layer(rgb_out) + + pseudo_out = torch.cat([rgb, spec], -1) + for layer in self.clf: + pseudo_out = layer(pseudo_out) + + depth_evidence, rgb_evidence, pseudo_evidence = F.softplus(spec_out), F.softplus(rgb_out), F.softplus(pseudo_out) + depth_alpha, rgb_alpha, pseudo_alpha = depth_evidence+1, rgb_evidence+1, pseudo_evidence+1 + depth_rgb_alpha = self.DS_Combin_two(self.DS_Combin_two(depth_alpha, rgb_alpha), pseudo_alpha) + return depth_alpha, rgb_alpha, pseudo_alpha, depth_rgb_alpha + diff --git a/models/__pycache__/TMC.cpython-310.pyc b/models/__pycache__/TMC.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54ea7440d0033ab00780b5b2d5de2a1407c63026 Binary files /dev/null and b/models/__pycache__/TMC.cpython-310.pyc differ diff --git a/models/__pycache__/TMC.cpython-39.pyc b/models/__pycache__/TMC.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..893f43ebfb7e50cd93eb5ea095cb25dad7ced464 Binary files /dev/null and b/models/__pycache__/TMC.cpython-39.pyc differ diff --git a/models/__pycache__/classifiers.cpython-310.pyc 
b/models/__pycache__/classifiers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c124e749d36923b7df48af0e6508a7dd0187b361 Binary files /dev/null and b/models/__pycache__/classifiers.cpython-310.pyc differ diff --git a/models/__pycache__/classifiers.cpython-39.pyc b/models/__pycache__/classifiers.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0047c5e01f6b90b4821fecb9bb6551c2ec9f8bb Binary files /dev/null and b/models/__pycache__/classifiers.cpython-39.pyc differ diff --git a/models/__pycache__/demo.txt b/models/__pycache__/demo.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/models/__pycache__/image.cpython-310.pyc b/models/__pycache__/image.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..849389a616b85594810aa7bf32f77fa7b3f47299 Binary files /dev/null and b/models/__pycache__/image.cpython-310.pyc differ diff --git a/models/__pycache__/image.cpython-39.pyc b/models/__pycache__/image.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a5f0326003e7c69ba236c526acdcf7b4a14573e Binary files /dev/null and b/models/__pycache__/image.cpython-39.pyc differ diff --git a/models/__pycache__/rawnet.cpython-310.pyc b/models/__pycache__/rawnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cce37fd7f13ce97bf346c55380c6d820a02b8cbf Binary files /dev/null and b/models/__pycache__/rawnet.cpython-310.pyc differ diff --git a/models/__pycache__/rawnet.cpython-39.pyc b/models/__pycache__/rawnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a49e1dd2531948f905db9feda6edb0efcbe8304 Binary files /dev/null and b/models/__pycache__/rawnet.cpython-39.pyc differ diff --git a/models/ai_detector/pytorch_model.pth b/models/ai_detector/pytorch_model.pth new file mode 100644 index 
from functools import partial

import numpy as np
import torch
from timm.models.efficientnet import tf_efficientnet_b4_ns, tf_efficientnet_b3_ns, \
    tf_efficientnet_b5_ns, tf_efficientnet_b2_ns, tf_efficientnet_b6_ns, tf_efficientnet_b7_ns
from torch import nn
from torch.nn.modules.dropout import Dropout
from torch.nn.modules.linear import Linear
from torch.nn.modules.pooling import AdaptiveAvgPool2d


def _encoder_entry(init_fn, features, pretrained, drop_path_rate):
    """Describe one EfficientNet backbone: final feature width plus a
    zero-argument delayed constructor."""
    return {
        "features": features,
        "init_op": partial(init_fn, pretrained=pretrained, drop_path_rate=drop_path_rate),
    }


# Registry of supported "noisy student" EfficientNet encoders.  "features" is
# the channel count of the last feature map; "init_op" builds the backbone.
encoder_params = {
    "tf_efficientnet_b3_ns": _encoder_entry(tf_efficientnet_b3_ns, 1536, True, 0.2),
    "tf_efficientnet_b2_ns": _encoder_entry(tf_efficientnet_b2_ns, 1408, False, 0.2),
    "tf_efficientnet_b4_ns": _encoder_entry(tf_efficientnet_b4_ns, 1792, True, 0.5),
    "tf_efficientnet_b5_ns": _encoder_entry(tf_efficientnet_b5_ns, 2048, True, 0.2),
    "tf_efficientnet_b4_ns_03d": _encoder_entry(tf_efficientnet_b4_ns, 1792, True, 0.3),
    "tf_efficientnet_b5_ns_03d": _encoder_entry(tf_efficientnet_b5_ns, 2048, True, 0.3),
    "tf_efficientnet_b5_ns_04d": _encoder_entry(tf_efficientnet_b5_ns, 2048, True, 0.4),
    "tf_efficientnet_b6_ns": _encoder_entry(tf_efficientnet_b6_ns, 2304, True, 0.2),
    "tf_efficientnet_b7_ns": _encoder_entry(tf_efficientnet_b7_ns, 2560, False, 0.2),
    "tf_efficientnet_b6_ns_04d": _encoder_entry(tf_efficientnet_b6_ns, 2304, True, 0.4),
}


def setup_srm_weights(input_channels: int = 3) -> torch.Tensor:
    """Creates the SRM kernels for noise analysis.

    Returns a (3, input_channels, 5, 5) tensor holding the three fixed
    noise-residual filters from Zhou et al., "Learning Rich Features for
    Image Manipulation Detection", CVPR 2018.
    """
    # (kernel, normalizing divisor) pairs: 1/2 horizontal, 1/4, 1/12.
    raw_kernels = [
        (np.array([
            [0., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0.],
            [0., 1., -2., 1., 0.],
            [0., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0.],
        ]), 2.0),
        (np.array([
            [0., 0., 0., 0., 0.],
            [0., -1., 2., -1., 0.],
            [0., 2., -4., 2., 0.],
            [0., -1., 2., -1., 0.],
            [0., 0., 0., 0., 0.],
        ]), 4.0),
        (np.array([
            [-1., 2., -2., 2., -1.],
            [2., -6., 8., -6., 2.],
            [-2., 8., -12., 8., -2.],
            [2., -6., 8., -6., 2.],
            [-1., 2., -2., 2., -1.],
        ]), 12.0),
    ]
    srm_kernel = torch.stack(
        [torch.from_numpy(kernel).float() / divisor for kernel, divisor in raw_kernels]
    )
    return srm_kernel.view(3, 1, 5, 5).repeat(1, input_channels, 1, 1)


def setup_srm_layer(input_channels: int = 3) -> torch.nn.Module:
    """Creates a SRM convolution layer for noise analysis.

    The weights are fixed (requires_grad=False): this layer is a frozen
    noise-residual extractor, not a trainable convolution.
    """
    weights = setup_srm_weights(input_channels)
    conv = torch.nn.Conv2d(input_channels, out_channels=3, kernel_size=5,
                           stride=1, padding=2, bias=False)
    with torch.no_grad():
        conv.weight = torch.nn.Parameter(weights, requires_grad=False)
    return conv


class DeepFakeClassifierSRM(nn.Module):
    """Binary deepfake classifier that feeds SRM noise residuals (instead of
    raw RGB) into an EfficientNet backbone."""

    def __init__(self, encoder, dropout_rate=0.5) -> None:
        super().__init__()
        self.encoder = encoder_params[encoder]["init_op"]()
        self.avg_pool = AdaptiveAvgPool2d((1, 1))
        self.srm_conv = setup_srm_layer(3)
        self.dropout = Dropout(dropout_rate)
        self.fc = Linear(encoder_params[encoder]["features"], 1)

    def forward(self, x):
        residual = self.srm_conv(x)            # fixed SRM noise features
        features = self.encoder.forward_features(residual)
        pooled = self.avg_pool(features).flatten(1)
        return self.fc(self.dropout(pooled))   # single raw logit


class GlobalWeightedAvgPool2d(nn.Module):
    """
    Global Weighted Average Pooling from paper "Global Weighted Average
    Pooling Bridges Pixel-level Localization and Image-level Classification"
    """

    def __init__(self, features: int, flatten=False):
        super().__init__()
        self.conv = nn.Conv2d(features, 1, kernel_size=1, bias=True)
        self.flatten = flatten

    def fscore(self, x):
        # Per-pixel importance score; sigmoid().exp() keeps every score >= 1.
        return self.conv(x).sigmoid().exp()

    def norm(self, x: torch.Tensor):
        # Normalize scores to a spatial probability map per sample.
        return x / x.sum(dim=[2, 3], keepdim=True)

    def forward(self, x):
        weights = self.norm(self.fscore(x))
        weighted = weights * x
        return weighted.sum(dim=[2, 3], keepdim=not self.flatten)


class DeepFakeClassifier(nn.Module):
    """Plain binary deepfake classifier: EfficientNet features, global average
    pooling, dropout and a single-logit head."""

    def __init__(self, encoder, dropout_rate=0.0) -> None:
        super().__init__()
        self.encoder = encoder_params[encoder]["init_op"]()
        self.avg_pool = AdaptiveAvgPool2d((1, 1))
        self.dropout = Dropout(dropout_rate)
        self.fc = Linear(encoder_params[encoder]["features"], 1)

    def forward(self, x):
        features = self.encoder.forward_features(x)
        pooled = self.avg_pool(features).flatten(1)
        return self.fc(self.dropout(pooled))


class DeepFakeClassifierGWAP(nn.Module):
    """Variant of DeepFakeClassifier that replaces plain average pooling with
    Global Weighted Average Pooling."""

    def __init__(self, encoder, dropout_rate=0.5) -> None:
        super().__init__()
        self.encoder = encoder_params[encoder]["init_op"]()
        self.avg_pool = GlobalWeightedAvgPool2d(encoder_params[encoder]["features"])
        self.dropout = Dropout(dropout_rate)
        self.fc = Linear(encoder_params[encoder]["features"], 1)

    def forward(self, x):
        features = self.encoder.forward_features(x)
        pooled = self.avg_pool(features).flatten(1)
        return self.fc(self.dropout(pooled))
import re
import os
import wget
import torch
import torchvision
import torch.nn as nn
import torch.nn.functional as F
from models.rawnet import SincConv, Residual_block
from models.classifiers import DeepFakeClassifier


class ImageEncoder(nn.Module):
    """Visual branch: a DeepFakeClassifier (EfficientNet-B7 NS) whose single
    logit is squashed through a sigmoid.

    Reads from ``args``: device, pretrained_image_encoder (load the DFDC
    checkpoint when True), freeze_image_encoder (stop backbone gradients).
    """

    # Portable checkpoint path (the original hard-coded Windows '\\' separators).
    _CKPT_PATH = os.path.join(
        'pretrained', 'final_999_DeepFakeClassifier_tf_efficientnet_b7_ns_0_23')

    def __init__(self, args):
        super(ImageEncoder, self).__init__()
        self.device = args.device
        self.args = args
        self.flatten = nn.Flatten()
        self.sigmoid = nn.Sigmoid()
        self.pretrained_image_encoder = args.pretrained_image_encoder
        self.freeze_image_encoder = args.freeze_image_encoder

        self.model = DeepFakeClassifier(encoder="tf_efficientnet_b7_ns").to(self.device)
        if self.pretrained_image_encoder:
            # BUG FIX: the checkpoint/state dict are kept in locals now; the old
            # code stored them as ``self.state_dict``, which shadowed
            # nn.Module.state_dict() and broke any later state_dict() call.
            # NOTE(review): torch.load unpickles arbitrary objects -- only load
            # trusted checkpoints.
            ckpt = torch.load(self._CKPT_PATH, map_location=torch.device(self.args.device))
            state_dict = ckpt.get("state_dict", ckpt)
            print("Loading pretrained image encoder...")
            # Strip a leading DataParallel "module." prefix.  The dot is now
            # escaped; the old pattern "^module." also matched e.g. "moduleX".
            self.model.load_state_dict(
                {re.sub(r"^module\.", "", k): v for k, v in state_dict.items()},
                strict=True)
            print("Loaded pretrained image encoder.")

        if self.freeze_image_encoder:
            for param in self.model.parameters():
                param.requires_grad = False

    def forward(self, x):
        """Return a sigmoid probability-like score of shape (batch, 1)."""
        return self.sigmoid(self.model(x))


class RawNet(nn.Module):
    """Audio branch: RawNet2-style anti-spoofing encoder over raw waveforms.

    Pipeline: fixed sinc filterbank -> 6 residual blocks with filter-wise
    attention (FMS) -> GRU -> two FC layers -> log-softmax over classes.
    """

    def __init__(self, args):
        super(RawNet, self).__init__()

        self.device = args.device
        # Channel plan: sinc frontend (20) -> two 20-filter blocks -> four
        # 128-filter blocks.
        self.filts = [20, [20, 20], [20, 128], [128, 128]]

        self.Sinc_conv = SincConv(device=self.device,
                                  out_channels=self.filts[0],
                                  kernel_size=1024,
                                  in_channels=args.in_channels)

        self.first_bn = nn.BatchNorm1d(num_features=self.filts[0])
        self.selu = nn.SELU(inplace=True)
        self.block0 = nn.Sequential(Residual_block(nb_filts=self.filts[1], first=True))
        self.block1 = nn.Sequential(Residual_block(nb_filts=self.filts[1]))
        self.block2 = nn.Sequential(Residual_block(nb_filts=self.filts[2]))
        self.filts[2][0] = self.filts[2][1]  # blocks 3-5 take 128 channels in
        self.block3 = nn.Sequential(Residual_block(nb_filts=self.filts[2]))
        self.block4 = nn.Sequential(Residual_block(nb_filts=self.filts[2]))
        self.block5 = nn.Sequential(Residual_block(nb_filts=self.filts[2]))
        self.avgpool = nn.AdaptiveAvgPool1d(1)

        self.fc_attention0 = self._make_attention_fc(in_features=self.filts[1][-1],
                                                     l_out_features=self.filts[1][-1])
        self.fc_attention1 = self._make_attention_fc(in_features=self.filts[1][-1],
                                                     l_out_features=self.filts[1][-1])
        self.fc_attention2 = self._make_attention_fc(in_features=self.filts[2][-1],
                                                     l_out_features=self.filts[2][-1])
        self.fc_attention3 = self._make_attention_fc(in_features=self.filts[2][-1],
                                                     l_out_features=self.filts[2][-1])
        self.fc_attention4 = self._make_attention_fc(in_features=self.filts[2][-1],
                                                     l_out_features=self.filts[2][-1])
        self.fc_attention5 = self._make_attention_fc(in_features=self.filts[2][-1],
                                                     l_out_features=self.filts[2][-1])

        self.bn_before_gru = nn.BatchNorm1d(num_features=self.filts[2][-1])
        self.gru = nn.GRU(input_size=self.filts[2][-1],
                          hidden_size=args.gru_node,
                          num_layers=args.nb_gru_layer,
                          batch_first=True)

        self.fc1_gru = nn.Linear(in_features=args.gru_node,
                                 out_features=args.nb_fc_node)
        self.fc2_gru = nn.Linear(in_features=args.nb_fc_node,
                                 out_features=args.nb_classes, bias=True)

        self.sig = nn.Sigmoid()
        self.logsoftmax = nn.LogSoftmax(dim=1)
        self.pretrained_audio_encoder = args.pretrained_audio_encoder
        self.freeze_audio_encoder = args.freeze_audio_encoder

        if self.pretrained_audio_encoder:
            print("Loading pretrained audio encoder")
            # Portable path (was 'pretrained\\RawNet.pth').
            ckpt = torch.load(os.path.join('pretrained', 'RawNet.pth'),
                              map_location=torch.device(self.device))
            self.load_state_dict(ckpt, strict=True)
            print("Loaded pretrained audio encoder")

        if self.freeze_audio_encoder:
            for param in self.parameters():
                param.requires_grad = False

    def forward(self, x, y=None):
        """x: (batch, nb_samp) raw waveform. Returns (batch, nb_classes) log-probs.

        ``y`` is accepted for signature compatibility and unused.
        """
        nb_samp, len_seq = x.shape[0], x.shape[1]
        x = x.view(nb_samp, 1, len_seq)

        x = self.Sinc_conv(x)
        x = F.max_pool1d(torch.abs(x), 3)
        x = self.first_bn(x)
        x = self.selu(x)

        # Six residual stages, each followed by filter-wise attention (FMS).
        # This replaces six copy-pasted, identical code blocks.
        stages = [
            (self.block0, self.fc_attention0),
            (self.block1, self.fc_attention1),
            (self.block2, self.fc_attention2),
            (self.block3, self.fc_attention3),
            (self.block4, self.fc_attention4),
            (self.block5, self.fc_attention5),
        ]
        for block, attention in stages:
            h = block(x)                                    # (batch, filt, time)
            w = self.avgpool(h).view(h.size(0), -1)         # (batch, filt)
            w = self.sig(attention(w)).view(w.size(0), w.size(1), -1)  # (batch, filt, 1)
            x = h * w + w                                   # scale + shift (FMS)

        x = self.bn_before_gru(x)
        x = self.selu(x)
        x = x.permute(0, 2, 1)  # (batch, filt, time) >> (batch, time, filt)
        self.gru.flatten_parameters()
        x, _ = self.gru(x)
        x = x[:, -1, :]         # last GRU step summarizes the utterance
        x = self.fc1_gru(x)
        x = self.fc2_gru(x)
        return self.logsoftmax(x)

    def _make_attention_fc(self, in_features, l_out_features):
        """Single linear layer used as filter-wise attention."""
        return nn.Sequential(nn.Linear(in_features=in_features,
                                       out_features=l_out_features))

    def _make_layer(self, nb_blocks, nb_filts, first=False):
        """Stack ``nb_blocks`` residual blocks.

        NOTE(review): mutates ``nb_filts`` in place after the first block;
        callers must not reuse the list.
        """
        layers = []
        for i in range(nb_blocks):
            first = first if i == 0 else False
            layers.append(Residual_block(nb_filts=nb_filts, first=first))
            if i == 0:
                nb_filts[0] = nb_filts[1]
        return nn.Sequential(*layers)
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
import numpy as np
from torch.utils import data
from collections import OrderedDict
from torch.nn.parameter import Parameter


class SincConv(nn.Module):
    """Fixed mel-spaced band-pass filterbank applied as a 1-D convolution.

    The ``out_channels`` filters are Hamming-windowed ideal band-pass kernels
    whose cutoffs are equally spaced on the mel scale. The bank is fixed (not
    learned) and is now precomputed once in ``__init__`` instead of being
    rebuilt on every forward pass.
    """

    @staticmethod
    def to_mel(hz):
        return 2595 * np.log10(1 + hz / 700)

    @staticmethod
    def to_hz(mel):
        return 700 * (10 ** (mel / 2595) - 1)

    def __init__(self, device, out_channels, kernel_size, in_channels=1,
                 sample_rate=16000, stride=1, padding=0, dilation=1,
                 bias=False, groups=1):
        super(SincConv, self).__init__()

        if in_channels != 1:
            msg = "SincConv only support one input channel (here, in_channels = {%i})" % (in_channels)
            raise ValueError(msg)

        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.sample_rate = sample_rate

        # Forcing the filters to be odd (i.e, perfectly symmetrics)
        if kernel_size % 2 == 0:
            self.kernel_size = self.kernel_size + 1

        self.device = device
        self.stride = stride
        self.padding = padding
        self.dilation = dilation

        if bias:
            raise ValueError('SincConv does not support bias.')
        if groups > 1:
            raise ValueError('SincConv does not support groups.')

        # Cutoff frequencies: equally spaced on the mel scale, mapped back to Hz.
        NFFT = 512
        f = int(self.sample_rate / 2) * np.linspace(0, 1, int(NFFT / 2) + 1)
        fmel = self.to_mel(f)
        filbandwidthsmel = np.linspace(np.min(fmel), np.max(fmel), self.out_channels + 1)
        self.mel = self.to_hz(filbandwidthsmel)
        self.hsupp = torch.arange(-(self.kernel_size - 1) / 2,
                                  (self.kernel_size - 1) / 2 + 1)
        self.band_pass = torch.zeros(self.out_channels, self.kernel_size)

        # PERF FIX: the filterbank is deterministic given the constructor
        # arguments, so build it once here instead of on every forward call.
        for i in range(len(self.mel) - 1):
            fmin = self.mel[i]
            fmax = self.mel[i + 1]
            hHigh = (2 * fmax / self.sample_rate) * np.sinc(2 * fmax * self.hsupp / self.sample_rate)
            hLow = (2 * fmin / self.sample_rate) * np.sinc(2 * fmin * self.hsupp / self.sample_rate)
            hideal = hHigh - hLow  # ideal band-pass = high-pass minus low-pass
            self.band_pass[i, :] = Tensor(np.hamming(self.kernel_size)) * Tensor(hideal)

    def forward(self, x):
        """x: (batch, 1, time) -> (batch, out_channels, time')."""
        band_pass_filter = self.band_pass.to(self.device)
        # Kept as an attribute for compatibility with code that inspects it.
        self.filters = band_pass_filter.view(self.out_channels, 1, self.kernel_size)
        return F.conv1d(x, self.filters, stride=self.stride,
                        padding=self.padding, dilation=self.dilation,
                        bias=None, groups=1)


class Residual_block(nn.Module):
    """Pre-activation residual block over (batch, channels, time) with a
    MaxPool1d(3) at the end; 1x1 conv shortcut when channel counts differ."""

    def __init__(self, nb_filts, first=False):
        super(Residual_block, self).__init__()
        self.first = first  # the very first block skips the input bn+lrelu

        if not self.first:
            self.bn1 = nn.BatchNorm1d(num_features=nb_filts[0])

        self.lrelu = nn.LeakyReLU(negative_slope=0.3)

        self.conv1 = nn.Conv1d(in_channels=nb_filts[0],
                               out_channels=nb_filts[1],
                               kernel_size=3, padding=1, stride=1)
        self.bn2 = nn.BatchNorm1d(num_features=nb_filts[1])
        self.conv2 = nn.Conv1d(in_channels=nb_filts[1],
                               out_channels=nb_filts[1],
                               kernel_size=3, padding=1, stride=1)

        if nb_filts[0] != nb_filts[1]:
            self.downsample = True
            self.conv_downsample = nn.Conv1d(in_channels=nb_filts[0],
                                             out_channels=nb_filts[1],
                                             padding=0, kernel_size=1, stride=1)
        else:
            self.downsample = False
        # BUG FIX: the pool is used on every path in forward(), but it was
        # previously created only on the non-downsample branch, raising
        # AttributeError whenever in/out channel counts differed.
        self.mp = nn.MaxPool1d(3)

    def forward(self, x):
        identity = x
        if not self.first:
            out = self.bn1(x)
            out = self.lrelu(out)
        else:
            out = x

        # BUG FIX: conv1 previously consumed the raw input ``x``, silently
        # discarding the bn1+lrelu pre-activation computed above.
        # NOTE(review): checkpoints trained with the old behavior will load
        # (same parameter names) but should be re-validated.
        out = self.conv1(out)
        out = self.bn2(out)
        out = self.lrelu(out)
        out = self.conv2(out)

        if self.downsample:
            identity = self.conv_downsample(identity)

        out += identity
        out = self.mp(out)
        return out
class RawNet(nn.Module):
    """RawNet2 anti-spoofing model driven by a ``d_args`` config dict.

    Expected keys: 'filts', 'first_conv', 'in_channels', 'gru_node',
    'nb_gru_layer', 'nb_fc_node', 'nb_classes'.
    Pipeline: sinc filterbank -> 6 residual blocks with filter-wise attention
    (FMS) -> GRU -> two FC layers -> log-softmax.
    """

    def __init__(self, d_args, device):
        super(RawNet, self).__init__()

        self.device = device

        self.Sinc_conv = SincConv(device=self.device,
                                  out_channels=d_args['filts'][0],
                                  kernel_size=d_args['first_conv'],
                                  in_channels=d_args['in_channels'])

        self.first_bn = nn.BatchNorm1d(num_features=d_args['filts'][0])
        self.selu = nn.SELU(inplace=True)
        self.block0 = nn.Sequential(Residual_block(nb_filts=d_args['filts'][1], first=True))
        self.block1 = nn.Sequential(Residual_block(nb_filts=d_args['filts'][1]))
        self.block2 = nn.Sequential(Residual_block(nb_filts=d_args['filts'][2]))
        # Widen the input side for the remaining blocks (mutates the config
        # list in place, as the original did -- callers must not reuse it).
        d_args['filts'][2][0] = d_args['filts'][2][1]
        self.block3 = nn.Sequential(Residual_block(nb_filts=d_args['filts'][2]))
        self.block4 = nn.Sequential(Residual_block(nb_filts=d_args['filts'][2]))
        self.block5 = nn.Sequential(Residual_block(nb_filts=d_args['filts'][2]))
        self.avgpool = nn.AdaptiveAvgPool1d(1)

        self.fc_attention0 = self._make_attention_fc(in_features=d_args['filts'][1][-1],
                                                     l_out_features=d_args['filts'][1][-1])
        self.fc_attention1 = self._make_attention_fc(in_features=d_args['filts'][1][-1],
                                                     l_out_features=d_args['filts'][1][-1])
        self.fc_attention2 = self._make_attention_fc(in_features=d_args['filts'][2][-1],
                                                     l_out_features=d_args['filts'][2][-1])
        self.fc_attention3 = self._make_attention_fc(in_features=d_args['filts'][2][-1],
                                                     l_out_features=d_args['filts'][2][-1])
        self.fc_attention4 = self._make_attention_fc(in_features=d_args['filts'][2][-1],
                                                     l_out_features=d_args['filts'][2][-1])
        self.fc_attention5 = self._make_attention_fc(in_features=d_args['filts'][2][-1],
                                                     l_out_features=d_args['filts'][2][-1])

        self.bn_before_gru = nn.BatchNorm1d(num_features=d_args['filts'][2][-1])
        self.gru = nn.GRU(input_size=d_args['filts'][2][-1],
                          hidden_size=d_args['gru_node'],
                          num_layers=d_args['nb_gru_layer'],
                          batch_first=True)

        self.fc1_gru = nn.Linear(in_features=d_args['gru_node'],
                                 out_features=d_args['nb_fc_node'])
        self.fc2_gru = nn.Linear(in_features=d_args['nb_fc_node'],
                                 out_features=d_args['nb_classes'], bias=True)

        self.sig = nn.Sigmoid()
        self.logsoftmax = nn.LogSoftmax(dim=1)

    def forward(self, x, y=None):
        """x: (batch, nb_samp) raw waveform -> (batch, nb_classes) log-probs.

        ``y`` is accepted for signature compatibility and unused.
        """
        nb_samp, len_seq = x.shape[0], x.shape[1]
        x = x.view(nb_samp, 1, len_seq)

        x = self.Sinc_conv(x)
        x = F.max_pool1d(torch.abs(x), 3)
        x = self.first_bn(x)
        x = self.selu(x)

        # Six residual stages with filter-wise attention (FMS); replaces six
        # copy-pasted identical code blocks.
        stages = [
            (self.block0, self.fc_attention0),
            (self.block1, self.fc_attention1),
            (self.block2, self.fc_attention2),
            (self.block3, self.fc_attention3),
            (self.block4, self.fc_attention4),
            (self.block5, self.fc_attention5),
        ]
        for block, attention in stages:
            h = block(x)                                    # (batch, filt, time)
            w = self.avgpool(h).view(h.size(0), -1)         # (batch, filt)
            w = self.sig(attention(w)).view(w.size(0), w.size(1), -1)  # (batch, filt, 1)
            x = h * w + w

        x = self.bn_before_gru(x)
        x = self.selu(x)
        x = x.permute(0, 2, 1)  # (batch, filt, time) >> (batch, time, filt)
        self.gru.flatten_parameters()
        x, _ = self.gru(x)
        x = x[:, -1, :]
        x = self.fc1_gru(x)
        x = self.fc2_gru(x)
        # BUG FIX: removed a leftover debug print of the output shape that
        # spammed stdout on every forward pass.
        return self.logsoftmax(x)

    def _make_attention_fc(self, in_features, l_out_features):
        """Single linear layer used as filter-wise attention."""
        return nn.Sequential(nn.Linear(in_features=in_features,
                                       out_features=l_out_features))

    def _make_layer(self, nb_blocks, nb_filts, first=False):
        """Stack ``nb_blocks`` residual blocks.

        NOTE(review): mutates ``nb_filts`` in place after the first block.
        """
        layers = []
        for i in range(nb_blocks):
            first = first if i == 0 else False
            layers.append(Residual_block(nb_filts=nb_filts, first=first))
            if i == 0:
                nb_filts[0] = nb_filts[1]
        return nn.Sequential(*layers)

    def summary(self, input_size, batch_size=-1, device="cuda", print_fn=None):
        """Print a torchsummary-style per-layer table of output shapes and
        parameter counts, using two dummy forward passes of batch size 2."""
        if print_fn is None:
            # BUG FIX: the original assigned to a typo'd name ``printfn``,
            # leaving print_fn as None and crashing on the first call below.
            print_fn = print
        model = self

        def register_hook(module):
            def hook(module, input, output):
                class_name = str(module.__class__).split(".")[-1].split("'")[0]
                module_idx = len(summary)

                m_key = "%s-%i" % (class_name, module_idx + 1)
                summary[m_key] = OrderedDict()
                summary[m_key]["input_shape"] = list(input[0].size())
                summary[m_key]["input_shape"][0] = batch_size
                if isinstance(output, (list, tuple)):
                    summary[m_key]["output_shape"] = [
                        [-1] + list(o.size())[1:] for o in output
                    ]
                else:
                    summary[m_key]["output_shape"] = list(output.size())
                    if len(summary[m_key]["output_shape"]) != 0:
                        summary[m_key]["output_shape"][0] = batch_size

                params = 0
                if hasattr(module, "weight") and hasattr(module.weight, "size"):
                    params += torch.prod(torch.LongTensor(list(module.weight.size())))
                    summary[m_key]["trainable"] = module.weight.requires_grad
                if hasattr(module, "bias") and hasattr(module.bias, "size"):
                    params += torch.prod(torch.LongTensor(list(module.bias.size())))
                summary[m_key]["nb_params"] = params

            # Hook every leaf module (skip containers and the model itself).
            if (
                not isinstance(module, nn.Sequential)
                and not isinstance(module, nn.ModuleList)
                and not (module == model)
            ):
                hooks.append(module.register_forward_hook(hook))

        device = device.lower()
        assert device in [
            "cuda",
            "cpu",
        ], "Input device is not valid, please specify 'cuda' or 'cpu'"

        if device == "cuda" and torch.cuda.is_available():
            dtype = torch.cuda.FloatTensor
        else:
            dtype = torch.FloatTensor
        if isinstance(input_size, tuple):
            input_size = [input_size]
        x = [torch.rand(2, *in_size).type(dtype) for in_size in input_size]
        summary = OrderedDict()
        hooks = []
        model.apply(register_hook)
        model(*x)
        for h in hooks:
            h.remove()

        print_fn("----------------------------------------------------------------")
        line_new = "{:>20} {:>25} {:>15}".format("Layer (type)", "Output Shape", "Param #")
        print_fn(line_new)
        print_fn("================================================================")
        total_params = 0
        total_output = 0
        trainable_params = 0
        for layer in summary:
            line_new = "{:>20} {:>25} {:>15}".format(
                layer,
                str(summary[layer]["output_shape"]),
                "{0:,}".format(summary[layer]["nb_params"]),
            )
            total_params += summary[layer]["nb_params"]
            total_output += np.prod(summary[layer]["output_shape"])
            if "trainable" in summary[layer]:
                if summary[layer]["trainable"] == True:
                    trainable_params += summary[layer]["nb_params"]
            print_fn(line_new)
import os
import onnx
import torch
import argparse
import numpy as np
import torch.nn as nn
from models.TMC import ETMC
from models import image
from onnx2pytorch import ConvertModel

# Portable path (the original hard-coded Windows '\\' separators).
onnx_model = onnx.load(os.path.join('checkpoints', 'efficientnet.onnx'))
pytorch_model = ConvertModel(onnx_model)

# Define the audio_args dictionary (RawNet2 hyper-parameters).
audio_args = {
    'nb_samp': 64600,
    'first_conv': 1024,
    'in_channels': 1,
    'filts': [20, [20, 20], [20, 128], [128, 128]],
    'blocks': [2, 4],
    'nb_fc_node': 1024,
    'gru_node': 1024,
    'nb_gru_layer': 3,
    'nb_classes': 2
}


def str2bool(value):
    """Parse a boolean CLI flag.

    BUG FIX: the flags below previously used ``type=bool``; argparse then
    treated ANY non-empty string -- including "False" -- as True.
    """
    if isinstance(value, bool):
        return value
    if value.lower() in ("yes", "true", "t", "1"):
        return True
    if value.lower() in ("no", "false", "f", "0"):
        return False
    raise argparse.ArgumentTypeError("expected a boolean, got %r" % (value,))


def get_args(parser):
    """Register all training/model CLI options on ``parser`` (mutates it)."""
    parser.add_argument("--batch_size", type=int, default=8)
    parser.add_argument("--data_dir", type=str, default="datasets/train/fakeavceleb*")
    parser.add_argument("--LOAD_SIZE", type=int, default=256)
    parser.add_argument("--FINE_SIZE", type=int, default=224)
    parser.add_argument("--dropout", type=float, default=0.2)
    parser.add_argument("--gradient_accumulation_steps", type=int, default=1)
    parser.add_argument("--hidden", nargs="*", type=int, default=[])
    parser.add_argument("--hidden_sz", type=int, default=768)
    parser.add_argument("--img_embed_pool_type", type=str, default="avg", choices=["max", "avg"])
    parser.add_argument("--img_hidden_sz", type=int, default=1024)
    parser.add_argument("--include_bn", type=int, default=True)
    parser.add_argument("--lr", type=float, default=1e-4)
    parser.add_argument("--lr_factor", type=float, default=0.3)
    parser.add_argument("--lr_patience", type=int, default=10)
    parser.add_argument("--max_epochs", type=int, default=500)
    parser.add_argument("--n_workers", type=int, default=12)
    parser.add_argument("--name", type=str, default="MMDF")
    parser.add_argument("--num_image_embeds", type=int, default=1)
    parser.add_argument("--patience", type=int, default=20)
    parser.add_argument("--savedir", type=str, default="./savepath/")
    parser.add_argument("--seed", type=int, default=1)
    parser.add_argument("--n_classes", type=int, default=2)
    parser.add_argument("--annealing_epoch", type=int, default=10)
    parser.add_argument("--device", type=str, default='cpu')
    parser.add_argument("--pretrained_image_encoder", type=str2bool, default=False)
    parser.add_argument("--freeze_image_encoder", type=str2bool, default=False)
    parser.add_argument("--pretrained_audio_encoder", type=str2bool, default=False)
    parser.add_argument("--freeze_audio_encoder", type=str2bool, default=False)
    parser.add_argument("--augment_dataset", type=str2bool, default=True)

    # NOTE(review): list-valued entries (e.g. 'filts') get type=list, which
    # would split an override string into characters; the defaults work, but
    # these options do not round-trip through the command line.
    for key, value in audio_args.items():
        parser.add_argument(f"--{key}", type=type(value), default=value)


def load_spec_modality_model(args):
    """Build the RawNet audio encoder and load its checkpoint (eval mode)."""
    spec_encoder = image.RawNet(args)
    # Portable path; the original 'checkpoints\RawNet2.pth' relied on a
    # literal backslash and broke anywhere but Windows.
    ckpt = torch.load(os.path.join('checkpoints', 'RawNet2.pth'),
                      map_location=torch.device('cpu'))
    spec_encoder.load_state_dict(ckpt, strict=True)
    spec_encoder.eval()
    return spec_encoder


# Load models.
parser = argparse.ArgumentParser(description="Train Models")
get_args(parser)
args, remaining_args = parser.parse_known_args()
assert remaining_args == [], remaining_args

spec_model = load_spec_modality_model(args)

print(f"Image model is: {pytorch_model}")

print(f"Audio model is: {spec_model}")


PATH = os.path.join('checkpoints', 'model.pth')

# Bundle both encoders' weights into a single checkpoint file.
torch.save({
    'spec_encoder': spec_model.state_dict(),
    'rgb_encoder': pytorch_model.state_dict()
}, PATH)

print("Model saved.")
import logging
import time
from datetime import timedelta


class LogFormatter:
    """Formatter prefixing each record with level, wall-clock time and the
    time elapsed since this formatter was created."""

    def __init__(self):
        self.start_time = time.time()

    def format(self, record):
        elapsed = timedelta(seconds=round(record.created - self.start_time))
        prefix = "%s - %s - %s" % (record.levelname, time.strftime("%x %X"), elapsed)
        # Indent continuation lines so multi-line messages align under the first.
        continuation = "\n" + " " * (len(prefix) + 3)
        body = record.getMessage().replace("\n", continuation)
        return "%s - %s" % (prefix, body)


def create_logger(filepath, args):
    """Configure and return the root logger.

    Installs a DEBUG-level file handler on ``filepath`` and an INFO-level
    console handler, both using LogFormatter, then logs all ``args`` as
    sorted "key: value" lines. The returned logger gains a ``reset_time``
    callable that restarts the elapsed-time clock.
    """
    formatter = LogFormatter()

    file_handler = logging.FileHandler(filepath, "a")
    file_handler.setLevel(logging.DEBUG)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    logger = logging.getLogger()
    logger.handlers = []
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    for handler in (file_handler, console_handler):
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    def reset_time():
        # Restart the elapsed-time clock shown in every log prefix.
        formatter.start_time = time.time()

    logger.reset_time = reset_time

    arg_lines = ["%s: %s" % (key, str(value))
                 for key, value in sorted(vars(args).items(), key=lambda item: item[0])]
    logger.info("\n".join(arg_lines))

    return logger
import contextlib
import numpy as np
import random
import shutil
import os
import zlib

import torch


def set_seed(seed):
    """Seed Python, NumPy and torch (CPU + all CUDA devices) RNGs and force
    deterministic cuDNN behavior (at some speed cost)."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)

    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False


def save_checkpoint(state, is_best, checkpoint_path, filename="checkpoint.pt"):
    """Save ``state`` under ``checkpoint_path``; also copy it to
    ``model_best.pt`` when ``is_best`` is True."""
    filename = os.path.join(checkpoint_path, filename)
    torch.save(state, filename)
    if is_best:
        shutil.copyfile(filename, os.path.join(checkpoint_path, "model_best.pt"))


def load_checkpoint(model, path):
    """Load the 'state_dict' entry of the checkpoint at ``path`` into ``model``.

    NOTE(review): no map_location is given, so a GPU-saved checkpoint needs a
    CUDA-capable host; torch.load also unpickles arbitrary objects -- only
    load trusted files.
    """
    best_checkpoint = torch.load(path)
    model.load_state_dict(best_checkpoint["state_dict"])


def log_metrics(set_name, metrics, logger):
    """Log loss and the two modality accuracies for ``set_name``.

    ``metrics`` must contain 'loss', 'spec_acc' and 'rgb_acc'.
    """
    logger.info(
        "{}: Loss: {:.5f} | spec_acc: {:.5f}, rgb_acc: {:.5f}".format(
            set_name, metrics["loss"], metrics["spec_acc"], metrics["rgb_acc"]
        )
    )


@contextlib.contextmanager
def numpy_seed(seed, *addl_seeds):
    """Context manager which seeds the NumPy PRNG with the specified seed and
    restores the state afterward.

    ``addl_seeds`` are folded into the seed so different call sites can derive
    distinct streams. BUG FIX: this previously mixed with the builtin
    ``hash``, which is randomized per process for strings (PYTHONHASHSEED),
    so "reproducible" sampling silently differed between runs; crc32 is
    stable across processes and platforms.
    """
    if seed is None:
        yield
        return
    if addl_seeds:
        seed = zlib.crc32(repr((seed, *addl_seeds)).encode("utf-8")) % 1000000
    state = np.random.get_state()
    np.random.seed(seed)
    try:
        yield
    finally:
        np.random.set_state(state)
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391