alkatraz445 commited on
Commit
e44aab7
1 Parent(s): 466eea9
Files changed (3) hide show
  1. app.py +42 -0
  2. examples.zip +3 -0
  3. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Standard library
import os
import zipfile

# Third-party
import gradio as gr
import numpy as np
from transformers import pipeline

# Unpack the bundled example images into the working directory so the
# Gradio interface can reference them by path.
with zipfile.ZipFile("examples.zip", "r") as zip_ref:
    zip_ref.extractall(".")
# Build a shuffled list of example records from the extracted folder.
# Each file name is expected to encode its label as the prefix before
# the first underscore, e.g. "fake_0123.jpg" -> label "fake".
EXAMPLES_FOLDER = 'examples'
examples = [
    {
        'path': os.path.join(EXAMPLES_FOLDER, file_name),
        'label': file_name.split('_')[0],
    }
    for file_name in os.listdir(EXAMPLES_FOLDER)
]
np.random.shuffle(examples)
# Load the Hugging Face deepfake-detection model.
# Fix: the original hard-coded device="cuda", which raises at startup on
# CPU-only hosts (e.g. a free Space). Select the GPU only when one is
# actually available; device=0 is the first CUDA device, -1 means CPU.
import torch

model = pipeline(
    "image-classification",
    model="dima806/deepfake_vs_real_image_detection",
    device=0 if torch.cuda.is_available() else -1,
)
# Prediction function wired into the Gradio interface.
def classify_image(image):
    """Classify *image* and return a label -> confidence mapping.

    Uses the module-level ``model`` pipeline; each pipeline result is a
    dict carrying 'label' and 'score' keys.
    """
    scores = {}
    for prediction in model(image):
        scores[prediction['label']] = prediction['score']
    return scores
# Create the Gradio interface.
# Fixes vs. the original:
#   * slice examples[:10] instead of indexing range(10) — the original
#     raised IndexError whenever fewer than 10 example files existed;
#   * each example is just the image path — the interface has a single
#     input component, so the original [path, label] pairs supplied two
#     example values for one input.
interface = gr.Interface(
    fn=classify_image,
    inputs=gr.Image(type="pil"),  # accepts images in PIL format
    outputs=gr.Label(num_top_classes=2),  # top-2 labels with probabilities
    title="Deepfake vs Real Image Detection",
    description="Upload an image to determine whether it's real or deepfake.",
    examples=[example["path"] for example in examples[:10]],
)

# Launch the Gradio app
interface.launch()
examples.zip ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c719c2f16bad2e71d0d33d6ae59fa646dac82a812e1a90578c4b97ef6e8f36c
3
+ size 28750945
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ gradio
2
+ Pillow
3
+ torch
4
+ transformers