<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta name="description" content="Demo Page of BEYOND ICML 2024.">
  <meta name="keywords" content="BEYOND, Adversarial Examples, Adversarial Detection">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Be Your Own Neighborhood: Detecting Adversarial Examples by the Neighborhood Relations Built on Self-Supervised Learning</title>
  <link href="https://fonts.googleapis.com/css?family=Google+Sans|Noto+Sans|Castoro"
        rel="stylesheet">
  <link rel="stylesheet" href="./static/css/bulma.min.css">
  <link rel="stylesheet" href="./static/css/bulma-carousel.min.css">
  <link rel="stylesheet" href="./static/css/bulma-slider.min.css">
  <link rel="stylesheet" href="./static/css/fontawesome.all.min.css">
  <link rel="stylesheet"
        href="https://cdn.jsdelivr.net/gh/jpswalsh/academicons@1/css/academicons.min.css">
  <link rel="stylesheet" href="./static/css/index.css">
  <link rel="icon" href="./static/images/favicon.svg">
  <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
  <script defer src="./static/js/fontawesome.all.min.js"></script>
  <script src="./static/js/bulma-carousel.min.js"></script>
  <script src="./static/js/bulma-slider.min.js"></script>
  <script src="./static/js/index.js"></script>
</head>
<body>
<section class="hero">
  <div class="hero-body">
    <div class="container is-max-desktop">
      <div class="columns is-centered">
        <div class="column has-text-centered">
          <h1 class="title is-1 publication-title">Be Your Own Neighborhood: Detecting Adversarial Examples by the Neighborhood Relations Built on Self-Supervised Learning</h1>
          <div class="is-size-5 publication-authors">
            <span class="author-block">
              <a href="#" target="_blank">Zhiyuan He</a><sup>1*</sup>,</span>
            <span class="author-block">
              <a href="https://yangyijune.github.io/" target="_blank">Yijun Yang</a><sup>1*</sup>,</span>
            <span class="author-block">
              <a href="https://sites.google.com/site/pinyuchenpage/home" target="_blank">Pin-Yu Chen</a><sup>2</sup>,
            </span>
            <span class="author-block">
              <a href="https://cure-lab.github.io/" target="_blank">Qiang Xu</a><sup>1</sup>,
            </span>
            <span class="author-block">
<a href="https://tsungyiho.github.io/" target="_blank">Tsung-Yi Ho</a><sup>1</sup>, | |
            </span>
          </div>
          <div class="is-size-5 publication-authors">
            <span class="author-block"><sup>*</sup>Equal contribution,</span>
            <span class="author-block"><sup>1</sup>The Chinese University of Hong Kong,</span>
            <span class="author-block"><sup>2</sup>IBM Research</span>
          </div>
          <div class="column has-text-centered">
            <div class="publication-links">
              <!-- PDF Link. -->
              <span class="link-block">
                <a href="https://arxiv.org/abs/2209.00005" target="_blank"
                   class="external-link button is-normal is-rounded is-dark">
                  <span class="icon">
                    <i class="fas fa-file-pdf"></i>
                  </span>
                  <span>Paper</span>
                </a>
              </span>
              <span class="link-block">
                <a href="https://arxiv.org/abs/2209.00005" target="_blank"
                   class="external-link button is-normal is-rounded is-dark">
                  <span class="icon">
                    <i class="ai ai-arxiv"></i>
                  </span>
                  <span>arXiv</span>
                </a>
              </span>
            </div>
          </div>
        </div>
      </div>
    </div>
  </div>
</section>
<!-- <section class="hero teaser"> | |
<div class="container is-max-desktop"> | |
<div class="hero-body"> | |
<video id="teaser" autoplay muted loop playsinline height="100%"> | |
<source src="./static/videos/teaser.mp4" | |
type="video/mp4"> | |
</video> | |
<h2 class="subtitle has-text-centered"> | |
<span class="dnerf">Nerfies</span> turns selfie videos from your phone into | |
free-viewpoint | |
portraits. | |
</h2> | |
</div> | |
</div> | |
</section> --> | |
<!-- <section class="hero is-light is-small"> | |
<div class="hero-body"> | |
<div class="container"> | |
<div id="results-carousel" class="carousel results-carousel"> | |
<div class="item item-steve"> | |
<video poster="" id="steve" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/steve.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
<div class="item item-chair-tp"> | |
<video poster="" id="chair-tp" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/chair-tp.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
<div class="item item-shiba"> | |
<video poster="" id="shiba" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/shiba.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
<div class="item item-fullbody"> | |
<video poster="" id="fullbody" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/fullbody.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
<div class="item item-blueshirt"> | |
<video poster="" id="blueshirt" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/blueshirt.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
<div class="item item-mask"> | |
<video poster="" id="mask" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/mask.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
<div class="item item-coffee"> | |
<video poster="" id="coffee" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/coffee.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
<div class="item item-toby"> | |
<video poster="" id="toby" autoplay controls muted loop playsinline height="100%"> | |
<source src="./static/videos/toby2.mp4" | |
type="video/mp4"> | |
</video> | |
</div> | |
</div> | |
</div> | |
</div> | |
</section> --> | |
<section class="section"> | |
<div class="container is-max-desktop"> | |
<!-- Abstract. --> | |
<div class="columns is-centered has-text-centered"> | |
<div class="column is-four-fifths"> | |
<h2 class="title is-3">Abstract</h2> | |
<div class="content has-text-justified"> | |
          <p>
            Deep Neural Networks (DNNs) have achieved excellent performance in various fields. However, DNNs’
            vulnerability to Adversarial Examples (AEs) hinders their deployment in safety-critical applications.
            In this paper, we present <strong>BEYOND</strong>, an innovative AE detection framework designed for
            reliable predictions. BEYOND identifies AEs by distinguishing an AE’s abnormal relation with its
            augmented versions, i.e., neighbors, from two perspectives: representation similarity and label
            consistency. An off-the-shelf Self-Supervised Learning (SSL) model is used to extract the representations
            and predict the labels, owing to its highly informative representation capacity compared to supervised
            learning models. We find that clean samples maintain a high degree of representation similarity and label
            consistency relative to their neighbors, whereas AEs exhibit significant discrepancies. We explain this
            observation and show that, by leveraging this discrepancy, BEYOND can accurately detect AEs. Additionally,
            we develop a rigorous justification for the effectiveness of BEYOND. Furthermore, as a plug-and-play model,
            BEYOND can easily cooperate with an Adversarially Trained Classifier (ATC), achieving state-of-the-art
            (SOTA) robust accuracy. Experimental results show that BEYOND outperforms baselines by a large margin,
            especially under adaptive attacks. Empowered by the robust relations built on SSL, BEYOND outperforms
            baselines in terms of both detection ability and speed.
          </p>
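          <p>
            To make the detection criterion above concrete, the sketch below scores an input by its average
            representation (cosine) similarity and its label consistency with a set of augmented neighbors, and flags
            it when either score falls below a threshold. This is only a hedged illustration of the idea as described
            in the abstract: the <code>ssl_encoder</code>, <code>cls_head</code>, and <code>augment</code> callables,
            the number of neighbors, and the thresholds are placeholder assumptions, not the released implementation.
          </p>
          <pre><code># Illustrative sketch (PyTorch-style) of neighborhood-based AE detection:
# representation similarity + label consistency with augmented neighbors.
import torch
import torch.nn.functional as F

def beyond_detect_sketch(x, ssl_encoder, cls_head, augment,
                         k=50, sim_thresh=0.8, label_thresh=0.7):
    """Return True if the single image tensor x is flagged as a likely AE."""
    with torch.no_grad():
        # Generate k augmented "neighbors" of x (e.g., SSL-style random views).
        neighbors = torch.stack([augment(x) for _ in range(k)])

        # Representations from an off-the-shelf SSL encoder, L2-normalized.
        z_x = F.normalize(ssl_encoder(x.unsqueeze(0)), dim=-1)   # (1, d)
        z_n = F.normalize(ssl_encoder(neighbors), dim=-1)        # (k, d)

        # Representation similarity: mean cosine similarity to the neighbors.
        rep_sim = (z_n @ z_x.T).mean().item()

        # Label consistency: fraction of neighbors predicted with x's label.
        y_x = cls_head(z_x).argmax(dim=-1)
        y_n = cls_head(z_n).argmax(dim=-1)
        label_cons = (y_n == y_x).float().mean().item()

    # Clean samples keep both scores high; adversarial examples tend to break
    # at least one of them, so a low score on either side triggers detection.
    return (rep_sim &lt; sim_thresh) or (label_cons &lt; label_thresh)</code></pre>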
        </div>
      </div>
    </div>
    <!--/ Abstract. -->
  </div>
</section>

<section class="section">
  <div class="container is-max-desktop">
    <div class="columns is-centered">
      <div class="column has-text-centered">
        <h2 class="title is-3">Introduction</h2>
      </div>
    </div>
  </div>
</section>

<section class="section">
  <div class="container is-max-desktop">
    <div class="columns is-centered">
      <div class="column has-text-centered">
        <h2 class="title is-3">Method Overview of BEYOND</h2>
      </div>
    </div>
  </div>
</section>
<section class="section"> | |
<div class="container is-max-desktop"> | |
<div class="columns is-centered"> | |
<div class="column has-text-centered"> | |
<h2 class="title is-3">Method Overview of BEYOND</h2> | |
</div> | |
</div> | |
</div> | |
</section> | |
<section class="section" id="BibTeX"> | |
<div class="container is-max-desktop content"> | |
<h2 class="title">BibTeX</h2> | |
    <pre><code>@inproceedings{he2024beyond,
  author    = {He, Zhiyuan and Yang, Yijun and Chen, Pin-Yu and Xu, Qiang and Ho, Tsung-Yi},
  title     = {Be Your Own Neighborhood: Detecting Adversarial Examples by the Neighborhood Relations Built on Self-Supervised Learning},
  booktitle = {International Conference on Machine Learning (ICML)},
  year      = {2024},
}</code></pre>
  </div>
</section>

<footer class="footer">
  <div class="container">
<div class="content has-text-centered"> | |
<a class="icon-link" target="_blank" | |
href="./static/videos/nerfies_paper.pdf"> | |
<i class="fas fa-file-pdf"></i> | |
</a> | |
<a class="icon-link" href="https://github.com/keunhong" target="_blank" class="external-link" disabled> | |
<i class="fab fa-github"></i> | |
</a> | |
</div> | |
<div class="columns is-centered"> | |
<div class="column is-8"> | |
<div class="content"> | |
          <p>
            This website is licensed under a <a rel="license" target="_blank"
            href="http://creativecommons.org/licenses/by-sa/4.0/">Creative
            Commons Attribution-ShareAlike 4.0 International License</a>.
          </p>
          <p>
            This means you are free to borrow the <a target="_blank"
            href="https://github.com/nerfies/nerfies.github.io">source code</a> of this website;
            we just ask that you link back to this page in the footer.
            Please remember to remove any analytics code included in the header that you do not
            want running on your own site.
          </p>
        </div>
      </div>
    </div>
  </div>
</footer>

</body>
</html>