binh230 committed
Commit ad96198 · verified · 1 Parent(s): c118fa8

Create app.py

Files changed (1)
  1. app.py +101 -0
app.py ADDED
@@ -0,0 +1,101 @@
+ __all__ = ["app"]
+
+ import gradio as gr
+ import torch
+ from transformers import AutoConfig, AutoTokenizer, DebertaV2ForSequenceClassification
+
+ MINIMUM_TOKENS = 48
+ FOUNDATION_MODEL_NAME = "binh230/deberta-base"
+
+ # Load the tokenizer and model for the DeBERTa-based classifier
+ tokenizer = AutoTokenizer.from_pretrained(FOUNDATION_MODEL_NAME)
+ config = AutoConfig.from_pretrained(FOUNDATION_MODEL_NAME)
+ config.num_labels = 2  # binary classification: human vs. AI-generated
+ model = DebertaV2ForSequenceClassification.from_pretrained(FOUNDATION_MODEL_NAME, config=config)
+
+ # Fall back to CPU when no GPU is available so the app still starts
+ DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
+ model.to(DEVICE)
+ model.eval()
+
+ # Count whitespace-separated words as a rough proxy for token count
+ def count_tokens(text):
+     return len(text.split())
+
+
+ def run_detector(input_str):
+     if count_tokens(input_str) < MINIMUM_TOKENS:
+         return f"Text is too short: at least {MINIMUM_TOKENS} tokens are needed to run the detector."
+
+     # Tokenize the input text
+     inputs = tokenizer(input_str, return_tensors="pt", padding=True, truncation=True).to(DEVICE)
+
+     # Run the model and take the argmax over the two class logits
+     with torch.no_grad():
+         outputs = model(**inputs)
+         logits = outputs.logits
+         prediction = torch.argmax(logits, dim=-1).item()
+
+     # Interpret the prediction
+     return "Most likely AI-Generated" if prediction == 1 else "Most likely Human-Generated"
+
+ # Gradio app interface
+ css = """
+ .green { color: black!important; line-height:1.9em; padding: 0.2em 0.2em; background: #ccffcc; border-radius:0.5rem;}
+ .red { color: black!important; line-height:1.9em; padding: 0.2em 0.2em; background: #ffad99; border-radius:0.5rem;}
+ .hyperlinks {
+     display: flex;
+     align-items: center;
+     align-content: center;
+     padding-top: 12px;
+     justify-content: flex-end;
+     margin: 0 10px;
+     text-decoration: none;
+     color: #000;
+ }
+ """
+
+ capybara_problem = '''Dr. Capy Cosmos, a capybara unlike any other, astounded the scientific community with his groundbreaking research...'''
+
+ with gr.Blocks(css=css, theme=gr.themes.Default(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as app:
+     with gr.Row():
+         with gr.Column(scale=3):
+             gr.HTML("<h1>MambaFormer: Detecting AI-Generated Text</h1>")
+         with gr.Column(scale=1):
+             gr.HTML("""
+             <p>
+                 <a href="https://github.com/DanielBinh2k3/Mamba-AI-generated-text-detection" target="_blank">code</a>
+                 <a href="mailto:[email protected]" target="_blank">contact</a>
+             </p>
+             """, elem_classes="hyperlinks")
+
+     with gr.Row():
+         input_box = gr.Textbox(value=capybara_problem, placeholder="Enter text here", lines=8, label="Input Text")
+     with gr.Row():
+         submit_button = gr.Button("Run Detection", variant="primary")
+         clear_button = gr.ClearButton()
+     with gr.Row():
+         output_text = gr.Textbox(label="Prediction", value="Most likely AI-Generated")
+
+     with gr.Accordion("Disclaimer", open=False):
+         gr.Markdown("""
+         - `Accuracy`: AI-generated text detectors aim for accuracy, but no detector is perfect.
+         - `Use Cases`: This tool is most useful for detecting AI-generated content in moderation scenarios.
+         - `Known Weaknesses`: Non-English texts and highly memorized texts (like constitutions) may yield unreliable results.
+         """)
+
+     with gr.Accordion("Cite our work", open=False):
+         gr.Markdown("""
+         ```bibtex
+         @misc{BamBa2024llm,
+             title={Enhancing AI Text Detection through MambaFormer and Adversarial Learning Techniques},
+             author={Truong Nguyen Gia Binh},
+             year={2024},
+             eprint={},
+             archivePrefix={},
+             primaryClass={}
+         }
+         ```
+         """)
+
+     # Wire the buttons: run the detector on submit, clear both boxes on clear
+     submit_button.click(run_detector, inputs=input_box, outputs=output_text)
+     clear_button.click(lambda: ("", ""), outputs=[input_box, output_text])
+
+ # Run the Gradio app
+ if __name__ == "__main__":
+     app.launch(share=True)
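
For reference, a minimal local smoke test of the detector added in this commit might look like the sketch below. It is an assumption, not part of the commit: it presumes `app.py` is importable from the working directory and that the model weights can be downloaded; importing the module loads the model, while the Gradio UI only launches when `app.py` is run directly.

```python
# Hypothetical smoke test for the run_detector() function defined in app.py.
# Importing the module downloads and loads the tokenizer and model;
# the Gradio interface is not launched on import.
from app import run_detector

# Filler text with well over MINIMUM_TOKENS (48) whitespace-separated words,
# so the length check in run_detector() passes.
sample = " ".join(["The quick brown fox jumps over the lazy dog."] * 10)

# Prints either "Most likely AI-Generated" or "Most likely Human-Generated".
print(run_detector(sample))
```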