perler committed on
Commit
1a55389
·
1 Parent(s): f34279e

fix header text

Browse files

update requirements, add PPSurf as copy

lfs track ply

This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. README.md +1 -0
  3. app.py +1 -2
  4. ppsurf/.gitignore +145 -0
  5. ppsurf/LICENSE +21 -0
  6. ppsurf/README.md +323 -0
  7. ppsurf/configs/device_server.yaml +13 -0
  8. ppsurf/configs/poco.yaml +77 -0
  9. ppsurf/configs/poco_mini.yaml +13 -0
  10. ppsurf/configs/ppsurf.yaml +13 -0
  11. ppsurf/configs/ppsurf_100nn.yaml +8 -0
  12. ppsurf/configs/ppsurf_10nn.yaml +8 -0
  13. ppsurf/configs/ppsurf_200nn.yaml +9 -0
  14. ppsurf/configs/ppsurf_25nn.yaml +8 -0
  15. ppsurf/configs/ppsurf_50nn.yaml +8 -0
  16. ppsurf/configs/ppsurf_mini.yaml +13 -0
  17. ppsurf/configs/profiler.yaml +8 -0
  18. ppsurf/datasets/abc_minimal/03_meshes/00010009_d97409455fa543b3a224250f_trimesh_000.ply +3 -0
  19. ppsurf/datasets/abc_minimal/03_meshes/00010039_75f31cb4dff84986aadc622b_trimesh_000.ply +3 -0
  20. ppsurf/datasets/abc_minimal/03_meshes/00010045_75f31cb4dff84986aadc622b_trimesh_006.ply +3 -0
  21. ppsurf/datasets/abc_minimal/03_meshes/00010071_493cf58028d24a5b97528c11_trimesh_001.ply +3 -0
  22. ppsurf/datasets/abc_minimal/03_meshes/00010074_493cf58028d24a5b97528c11_trimesh_004.ply +3 -0
  23. ppsurf/datasets/abc_minimal/03_meshes/00010089_5ae1ee45b583467fa009adc4_trimesh_000.ply +3 -0
  24. ppsurf/datasets/abc_minimal/03_meshes/00010098_1f6110e499fb41c582c50527_trimesh_001.ply +3 -0
  25. ppsurf/datasets/abc_minimal/03_meshes/00011084_fddd53ce45f640f3ab922328_trimesh_019.ply +3 -0
  26. ppsurf/datasets/abc_minimal/03_meshes/00016513_3d6966cd42eb44ab8f4224f2_trimesh_053.ply +3 -0
  27. ppsurf/datasets/abc_minimal/03_meshes/00994122_57d9d4755722f9d2d7436f0a_trimesh_000.ply +3 -0
  28. ppsurf/datasets/abc_minimal/04_pts_vis/00010009_d97409455fa543b3a224250f_trimesh_000.xyz.ply +3 -0
  29. ppsurf/datasets/abc_minimal/04_pts_vis/00010039_75f31cb4dff84986aadc622b_trimesh_000.xyz.ply +3 -0
  30. ppsurf/datasets/abc_minimal/04_pts_vis/00010045_75f31cb4dff84986aadc622b_trimesh_006.xyz.ply +3 -0
  31. ppsurf/datasets/abc_minimal/04_pts_vis/00010071_493cf58028d24a5b97528c11_trimesh_001.xyz.ply +3 -0
  32. ppsurf/datasets/abc_minimal/04_pts_vis/00010074_493cf58028d24a5b97528c11_trimesh_004.xyz.ply +3 -0
  33. ppsurf/datasets/abc_minimal/04_pts_vis/00010089_5ae1ee45b583467fa009adc4_trimesh_000.xyz.ply +3 -0
  34. ppsurf/datasets/abc_minimal/04_pts_vis/00010098_1f6110e499fb41c582c50527_trimesh_001.xyz.ply +3 -0
  35. ppsurf/datasets/abc_minimal/04_pts_vis/00011084_fddd53ce45f640f3ab922328_trimesh_019.xyz.ply +3 -0
  36. ppsurf/datasets/abc_minimal/04_pts_vis/00016513_3d6966cd42eb44ab8f4224f2_trimesh_053.xyz.ply +3 -0
  37. ppsurf/datasets/abc_minimal/04_pts_vis/00994122_57d9d4755722f9d2d7436f0a_trimesh_000.xyz.ply +3 -0
  38. ppsurf/datasets/abc_minimal/05_query_dist/00010009_d97409455fa543b3a224250f_trimesh_000.ply.npy +3 -0
  39. ppsurf/datasets/abc_minimal/05_query_dist/00010039_75f31cb4dff84986aadc622b_trimesh_000.ply.npy +3 -0
  40. ppsurf/datasets/abc_minimal/05_query_dist/00010045_75f31cb4dff84986aadc622b_trimesh_006.ply.npy +3 -0
  41. ppsurf/datasets/abc_minimal/05_query_dist/00010071_493cf58028d24a5b97528c11_trimesh_001.ply.npy +3 -0
  42. ppsurf/datasets/abc_minimal/05_query_dist/00010074_493cf58028d24a5b97528c11_trimesh_004.ply.npy +3 -0
  43. ppsurf/datasets/abc_minimal/05_query_dist/00010089_5ae1ee45b583467fa009adc4_trimesh_000.ply.npy +3 -0
  44. ppsurf/datasets/abc_minimal/05_query_dist/00010098_1f6110e499fb41c582c50527_trimesh_001.ply.npy +3 -0
  45. ppsurf/datasets/abc_minimal/05_query_dist/00011084_fddd53ce45f640f3ab922328_trimesh_019.ply.npy +3 -0
  46. ppsurf/datasets/abc_minimal/05_query_dist/00016513_3d6966cd42eb44ab8f4224f2_trimesh_053.ply.npy +3 -0
  47. ppsurf/datasets/abc_minimal/05_query_dist/00994122_57d9d4755722f9d2d7436f0a_trimesh_000.ply.npy +3 -0
  48. ppsurf/datasets/abc_minimal/05_query_pts/00010009_d97409455fa543b3a224250f_trimesh_000.ply.npy +3 -0
  49. ppsurf/datasets/abc_minimal/05_query_pts/00010039_75f31cb4dff84986aadc622b_trimesh_000.ply.npy +3 -0
  50. ppsurf/datasets/abc_minimal/05_query_pts/00010045_75f31cb4dff84986aadc622b_trimesh_006.ply.npy +3 -0
.gitattributes CHANGED
@@ -34,3 +34,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  *.obj filter=lfs diff=lfs merge=lfs -text
 
 
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  *.obj filter=lfs diff=lfs merge=lfs -text
37
+ *.ply filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -4,6 +4,7 @@ emoji: 🗿
4
  colorFrom: gray
5
  colorTo: blue
6
  sdk: gradio
 
7
  pinned: true
8
  license: mit
9
  suggested_hardware: a10g-small
 
4
  colorFrom: gray
5
  colorTo: blue
6
  sdk: gradio
7
+ sdk_version: 4.44.1
8
  pinned: true
9
  license: mit
10
  suggested_hardware: a10g-small
app.py CHANGED
@@ -91,8 +91,7 @@ def main():
91
 
92
  subprocess.run(['pip', 'list'])
93
 
94
- description_header = '# PPSurf: Combining Patches and Point Convolutions for Detailed Surface Reconstruction\n
95
- Note: Hugginface disabled docker support for Zero-GPU without notification and no solution. I will fix this space when I find the time.'
96
 
97
  description_col0 = '''## [Github](https://github.com/cg-tuwien/ppsurf)
98
  Supported input file formats:
 
91
 
92
  subprocess.run(['pip', 'list'])
93
 
94
+ description_header = '# PPSurf: Combining Patches and Point Convolutions for Detailed Surface Reconstruction'
 
95
 
96
  description_col0 = '''## [Github](https://github.com/cg-tuwien/ppsurf)
97
  Supported input file formats:
ppsurf/.gitignore ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ pip-wheel-metadata/
24
+ share/python-wheels/
25
+ *.egg-info/
26
+ .installed.cfg
27
+ *.egg
28
+ MANIFEST
29
+
30
+ # PyInstaller
31
+ # Usually these files are written by a python script from a template
32
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
33
+ *.manifest
34
+ *.spec
35
+
36
+ # Installer logs
37
+ pip-log.txt
38
+ pip-delete-this-directory.txt
39
+
40
+ # Unit test / coverage reports
41
+ htmlcov/
42
+ .tox/
43
+ .nox/
44
+ .coverage
45
+ .coverage.*
46
+ .cache
47
+ nosetests.xml
48
+ coverage.xml
49
+ *.cover
50
+ *.py,cover
51
+ .hypothesis/
52
+ .pytest_cache/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ target/
76
+
77
+ # Jupyter Notebook
78
+ .ipynb_checkpoints
79
+
80
+ # IPython
81
+ profile_default/
82
+ ipython_config.py
83
+
84
+ # pyenv
85
+ .python-version
86
+
87
+ # pipenv
88
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
90
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
91
+ # install all needed dependencies.
92
+ #Pipfile.lock
93
+
94
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95
+ __pypackages__/
96
+
97
+ # Celery stuff
98
+ celerybeat-schedule
99
+ celerybeat.pid
100
+
101
+ # SageMath parsed files
102
+ *.sage.py
103
+
104
+ # Environments
105
+ .env
106
+ .venv
107
+ env/
108
+ venv/
109
+ ENV/
110
+ env.bak/
111
+ venv.bak/
112
+
113
+ # Spyder project settings
114
+ .spyderproject
115
+ .spyproject
116
+
117
+ # Rope project settings
118
+ .ropeproject
119
+
120
+ # mkdocs documentation
121
+ /site
122
+
123
+ # mypy
124
+ .mypy_cache/
125
+ .dmypy.json
126
+ dmypy.json
127
+
128
+ # Pyre type checker
129
+ .pyre/
130
+ .idea/
131
+
132
+ *.zip
133
+ *.pth
134
+
135
+ results/
136
+ models/
137
+ logs/
138
+ datasets/*
139
+ !datasets/abc_minimal/
140
+ logs_framework/
141
+ debug/
142
+ .vscode/
143
+ backups/
144
+ lightning_logs/
145
+ pl_models/
ppsurf/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Philipp Erler
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
ppsurf/README.md ADDED
@@ -0,0 +1,323 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # PPSurf
2
+ Combining Patches and Point Convolutions for Detailed Surface Reconstruction
3
+
4
+ This is our implementation of [PPSurf](https://www.cg.tuwien.ac.at/research/publications/2024/erler_2024_ppsurf/),
5
+ a network that estimates a signed distance function from point clouds. This SDF is turned into a mesh with Marching Cubes.
6
+
7
+ ![PPSurf teaser](images/teaser.png)
8
+
9
+ This is our follow-up work for [Points2Surf](https://www.cg.tuwien.ac.at/research/publications/2020/erler-p2s/).
10
+ It uses parts of [POCO](https://github.com/valeoai/POCO), mainly the network and mesh extraction.
11
+ This work was published in [Computer Graphics Forum (Jan 2024)](https://onlinelibrary.wiley.com/doi/10.1111/cgf.15000).
12
+
13
+
14
+ ## Setup
15
+
16
+ We tested this repository on these systems:
17
+ * Windows 10/11 and Ubuntu 22.04 LTS
18
+ * CUDA 11.7, 11.8 and 12.1
19
+
20
+ To manage the Python environments, we recommend using [Micromamba](https://github.com/mamba-org/mamba),
21
+ a much faster Anaconda alternative.
22
+ To install it, [follow this guide](https://mamba.readthedocs.io/en/latest/micromamba-installation.html#umamba-install).
23
+
24
+ Alternatively, you can install the required packages with conda by simply replacing the 'mamba' calls with 'conda'.
25
+ Finally, you can use Pip with the requirements.txt.
26
+
27
+ ``` bash
28
+ # clone this repo, a minimal dataset is included
29
+ git clone https://github.com/ErlerPhilipp/ppsurf.git
30
+
31
+ # go into the cloned dir
32
+ cd ppsurf
33
+
34
+ # create the environment with the required packages
35
+ mamba env create --file pps{_win}.yml
36
+
37
+ # activate the new environment
38
+ mamba activate pps
39
+ ```
40
+ Use `pps_win.yml` for Windows and `pps.yml` for other OS.
41
+
42
+ Test the setup with the minimal dataset included in the repo:
43
+ ``` bash
44
+ python full_run_pps_mini.py
45
+ ```
46
+
47
+ ## Datasets, Model and Results
48
+
49
+ Datesets:
50
+ ``` bash
51
+ # download the ABC training and validation set
52
+ python datasets/download_abc_training.py
53
+
54
+ # download the test datasets
55
+ python datasets/download_testsets.py
56
+ ```
57
+
58
+ Model:
59
+ ``` bash
60
+ python models/download_ppsurf_50nn.py
61
+ ```
62
+ Let us know in case you need the other models from the ablation.
63
+ They were trained using old, unclean code and are not directly compatible with this repo.
64
+
65
+ Results:
66
+
67
+ Download the results used for the paper from [here](https://www.cg.tuwien.ac.at/research/publications/2024/erler_2024_ppsurf/).
68
+ This includes meshes and metrics for the 50NN variant.
69
+
70
+ ## Reconstruct single Point Clouds
71
+
72
+ After the setup, you can reconstruct a point cloud with this simple command:
73
+ ``` bash
74
+ python pps.py rec {in_file} {out_dir} {extra params}
75
+
76
+ # example
77
+ python pps.py rec "datasets/abc_minimal/04_pts_vis/00010009_d97409455fa543b3a224250f_trimesh_000.xyz.ply" "results/my clouds/" --model.init_args.gen_resolution_global 129
78
+ ```
79
+ Where *in_file* is the path to the point cloud and *out_dir* is the path to the output directory.
80
+ This will download our pre-trained 50NN model if necessary and reconstruct the point cloud with it.
81
+ You can append additional parameters as described in [Command Line Interface Section](#Command-Line-Interface).
82
+
83
+ *rec* is actually not a sub-command but is converted to *predict* with the default parameters before parsing.
84
+ You can use the *predict* sub-command directly for more control over the reconstruction:
85
+ ``` bash
86
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
87
+ --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt --trainer.logger False --trainer.devices 1 \
88
+ --data.init_args.in_file {in_file} --model.init_args.results_dir {out_dir}
89
+ ```
90
+
91
+ Using the *predict* sub-command will **not** download our pre-trained model. You can download it manually:
92
+ ``` bash
93
+ python models/download_ppsurf_50nn.py
94
+ ```
95
+
96
+ Supported file formats are:
97
+ - PLY, STL, OBJ and other mesh files loaded by [trimesh](https://github.com/mikedh/trimesh).
98
+ - XYZ as whitespace-separated text file, read by [NumPy](https://numpy.org/doc/stable/reference/generated/numpy.loadtxt.html).
99
+ Load first 3 columns as XYZ coordinates. All other columns will be ignored.
100
+ - NPY and NPZ, read by [NumPy](https://numpy.org/doc/stable/reference/generated/numpy.load.html).
101
+ NPZ assumes default key='arr_0'. All columns after the first 3 columns will be ignored.
102
+ - LAS and LAZ (version 1.0-1.4), COPC and CRS loaded by [Laspy](https://github.com/laspy/laspy).
103
+ You may want to sub-sample large point clouds to ~250k points to avoid speed and memory issues.
104
+ For detailed reconstruction, you'll need to extract parts of large point clouds.
105
+
106
+
107
+ ## Replicate Results
108
+
109
+ Train, reconstruct and evaluate to replicate the main results (PPSurf 50NN) from the paper
110
+ ``` bash
111
+ python full_run_pps.py
112
+ ```
113
+
114
+ Training takes about 5 hours on 4 A40 GPUs. By default, training will use all available GPUs and CPUs.
115
+ Reconstructing one object takes about 1 minute on a single A40. The test sets have almost 1000 objects in total.
116
+
117
+ Logging during training with Tensorboard is enabled by default.
118
+ We log the loss, accuracy, recall and F1 score for the sign prediction.
119
+ You can start a Tensorboard server with:
120
+ ``` bash
121
+ tensorboard --logdir models
122
+ ```
123
+
124
+
125
+ ## Command Line Interface
126
+
127
+ PPSurf uses the Pytorch-Lightning [CLI](https://lightning.ai/docs/pytorch/stable/cli/lightning_cli.html).
128
+ The basic structure is:
129
+ ``` bash
130
+ # CLI command template
131
+ python {CLI entry point} {sub-command} {configs} {extra params}
132
+ ```
133
+ Where the *CLI entry point* is either `pps.py` or `poco.py` and *sub-command* can be one of *[fit, test, predict]*.
134
+ *Fit* trains a model, *test* evaluates it and *predict* reconstructs a whole dataset or a single point cloud.
135
+
136
+ *Configs* can be any number of YAML files. Later ones override values from earlier ones.
137
+ This example adapts the default POCO parameters to PPSurf needs and uses all GPUs of our training server:
138
+ ``` bash
139
+ -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/device_server.yaml
140
+ ```
141
+
142
+ You can override any available parameter explicitly:
143
+ ``` bash
144
+ --model.init_args.gen_resolution_global 129 --debug True
145
+ ```
146
+
147
+ When running *test*, *predict* or *rec*, you need to consider a few more things.
148
+ Make sure to specify the model checkpoint!
149
+ Also, you need to specify a dataset, since the default is the training set.
150
+ Finally, you should disable the logger, or it will create empty folders and logs.
151
+ ``` bash
152
+ --ckpt_path 'models/{name}/version_{version}/checkpoints/last.ckpt' --data.init_args.in_file 'datasets/abc_minimal/testset.txt' --trainer.logger False
153
+ ```
154
+ where *name* is e.g. ppsurf and *version* is usually 0.
155
+ If you run the training multiple times, you need to increment the version number.
156
+
157
+ Appending this will print the assembled config without running anything:
158
+ ``` bash
159
+ --print_config
160
+ ```
161
+
162
+ These are the commands called by full_run_pps.py to reproduce our results *PPSurf 50 NN*:
163
+ ``` bash
164
+ # train
165
+ python pps.py fit -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/device_server.yaml -c configs/ppsurf_50nn.yaml
166
+
167
+ # test
168
+ python pps.py test -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
169
+ --data.init_args.in_file datasets/abc/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
170
+ --trainer.logger False --trainer.devices 1
171
+
172
+ # predict all ABC datasets
173
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
174
+ --data.init_args.in_file datasets/abc/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
175
+ --trainer.logger False --trainer.devices 1
176
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
177
+ --data.init_args.in_file datasets/abc_extra_noisy/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
178
+ --trainer.logger False --trainer.devices 1
179
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
180
+ --data.init_args.in_file datasets/abc_noisefree/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
181
+ --trainer.logger False --trainer.devices 1
182
+
183
+ # predict all Famous datasets
184
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
185
+ --data.init_args.in_file datasets/famous_original/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
186
+ --trainer.logger False --trainer.devices 1
187
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
188
+ --data.init_args.in_file datasets/famous_noisefree/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
189
+ --trainer.logger False --trainer.devices 1
190
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
191
+ --data.init_args.in_file datasets/famous_sparse/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
192
+ --trainer.logger False --trainer.devices 1
193
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
194
+ --data.init_args.in_file datasets/famous_dense/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
195
+ --trainer.logger False --trainer.devices 1
196
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
197
+ --data.init_args.in_file datasets/famous_extra_noisy/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
198
+ --trainer.logger False --trainer.devices 1
199
+
200
+ # predict all Thingi10k datasets
201
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
202
+ --data.init_args.in_file datasets/thingi10k_scans_original/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
203
+ --trainer.logger False --trainer.devices 1
204
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
205
+ --data.init_args.in_file datasets/thingi10k_scans_noisefree/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
206
+ --trainer.logger False --trainer.devices 1
207
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
208
+ --data.init_args.in_file datasets/thingi10k_scans_sparse/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
209
+ --trainer.logger False --trainer.devices 1
210
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
211
+ --data.init_args.in_file datasets/thingi10k_scans_dense/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
212
+ --trainer.logger False --trainer.devices 1
213
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
214
+ --data.init_args.in_file datasets/thingi10k_scans_extra_noisy/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
215
+ --trainer.logger False --trainer.devices 1
216
+
217
+ # predict the real-world dataset
218
+ python pps.py predict -c configs/poco.yaml -c configs/ppsurf.yaml -c configs/ppsurf_50nn.yaml \
219
+ --data.init_args.in_file datasets/real_world/testset.txt --ckpt_path models/ppsurf_50nn/version_0/checkpoints/last.ckpt \
220
+ --trainer.logger False --trainer.devices 1
221
+
222
+ # create comparison tables (will have only 50NN column)
223
+ python source/figures/comp_all.py
224
+ ```
225
+
226
+
227
+ ## Outputs and Evaluation
228
+
229
+ **Training**:
230
+ Model checkpoints, hyperparameters and logs are stored in `models/{model}/version_{version}/`.
231
+ The version number is incremented with each training run.
232
+ The checkpoint for further use is `models/{model}/version_{version}/checkpoints/last.ckpt`.
233
+
234
+ **Testing**:
235
+ Test results are stored in `results/{model}/{dataset}/metrics_{model}.xlsx`.
236
+ This is like the validation but on all data of the test/val set with additional metrics.
237
+
238
+ **Reconstruction**:
239
+ Reconstructed meshes are stored in `results/{model}/{dataset}/meshes`.
240
+ After reconstruction, metrics are computed and stored in `results/{model}/{dataset}/{metric}_{model}.xlsx`,
241
+ where *metric* is one of *[chamfer_distance, f1, iou, normal_error]*.
242
+
243
+ **Metrics**:
244
+ You can (re-)run the metrics, e.g. for other methods, with:
245
+ ``` bash
246
+ python source/make_evaluation.py
247
+ ```
248
+ You may need to adjust *model_names* and *dataset_names* in this script.
249
+ This supports the results of other methods if they are in the same structure as ours.
250
+
251
+ **Comparisons**:
252
+ We provide scripts to generate comparisons in `source/figures`:
253
+ ``` bash
254
+ python source/figures/comp_{comp_name}.py
255
+ ```
256
+ This will:
257
+ - assemble the per-shape metrics spreadsheets of all relevant methods in `results/comp/{dataset}/{metric}.xlsx`.
258
+ - compute and visualize the Chamfer distance, encoded as vertex colors in
259
+ `results/comp/{dataset}/{method}/mesh_cd_vis` as PLY.
260
+ - render the reconstructed mesh with and without distance colors in `results/comp/{dataset}/{method}/mesh_rend` and
261
+ `results/comp/{dataset}/{method}/cd_vis_rend` as PNG.
262
+ - render the GT mesh in `results/comp/{dataset}/mesh_gt_rend` as PNG. Note that this does only work if a real screen is attached.
263
+ - assemble the per-method mean, median and stddev for all metrics in `results/comp/{comp_name}.xlsx`.
264
+ - assemble all renderings as a qualitative report in `results/comp/{comp_name}.html`.
265
+ - assemble per-dataset mean for all relevant datasets, methods and metrics in `results/comp/reports/{comp_name}`
266
+ as spreadsheet and LaTex table.
267
+
268
+ **Figures**:
269
+ You can prepare Chamfer distance data and render the results with Blender using these scripts:
270
+ ``` bash
271
+ python source/figures/prepare_figures.py
272
+ python source/figures/render_meshes_blender.py
273
+ ```
274
+ This requires some manual camera adjustment in Blender for some objects.
275
+ Please don't ask for support on this messy last-minute code.
276
+
277
+
278
+ ## Trouble Shooting
279
+
280
+ On Windows, you might run into DLL load issues. If so, try re-installing intel-openmp:
281
+ ``` bash
282
+ mamba install -c defaults intel-openmp --force-reinstall
283
+ ```
284
+
285
+ Conda/Mamba might run into a compile error while installing the environment. If so, try updating conda:
286
+ ``` bash
287
+ conda update -n base -c defaults conda
288
+ ```
289
+
290
+ Pip might fail when creating the environment. If so, try installing the Pip packages from the `pps.yml` manually.
291
+
292
+ On Windows, Pip install may raise a
293
+ "Microsoft Visual C++ 14.0 or greater is required.
294
+ Get it with "Microsoft C++ Build Tools" error.
295
+ In this case, install the MS Visual Studio build tools,
296
+ as described on [Stackoverflow](https://stackoverflow.com/questions/64261546/how-to-solve-error-microsoft-visual-c-14-0-or-greater-is-required-when-inst).
297
+
298
+
299
+ ## Updates
300
+
301
+ ### 2023-10-13
302
+
303
+ Improved speed by using [pykdtree](https://github.com/storpipfugl/pykdtree)
304
+ instead of [Scipy KDTree](https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.KDTree.html) for k-NN queries
305
+
306
+
307
+ ## Citation
308
+ If you use our work, please cite our paper:
309
+ ```
310
+ @article{ppsurf2024,
311
+ author = {Erler, Philipp and Fuentes-Perez, Lizeth and Hermosilla, Pedro and Guerrero, Paul and Pajarola, Renato and Wimmer, Michael},
312
+ title = {PPSurf: Combining Patches and Point Convolutions for Detailed Surface Reconstruction},
313
+ journal = {Computer Graphics Forum},
314
+ volume = {n/a},
315
+ number = {n/a},
316
+ pages = {e15000},
317
+ keywords = {modeling, surface reconstruction},
318
+ doi = {https://doi.org/10.1111/cgf.15000},
319
+ url = {https://onlinelibrary.wiley.com/doi/abs/10.1111/cgf.15000},
320
+ eprint = {https://onlinelibrary.wiley.com/doi/pdf/10.1111/cgf.15000},
321
+ abstract = {Abstract 3D surface reconstruction from point clouds is a key step in areas such as content creation, archaeology, digital cultural heritage and engineering. Current approaches either try to optimize a non-data-driven surface representation to fit the points, or learn a data-driven prior over the distribution of commonly occurring surfaces and how they correlate with potentially noisy point clouds. Data-driven methods enable robust handling of noise and typically either focus on a global or a local prior, which trade-off between robustness to noise on the global end and surface detail preservation on the local end. We propose PPSurf as a method that combines a global prior based on point convolutions and a local prior based on processing local point cloud patches. We show that this approach is robust to noise while recovering surface details more accurately than the current state-of-the-art. Our source code, pre-trained model and dataset are available at https://github.com/cg-tuwien/ppsurf.}
322
+ }
323
+ ```
ppsurf/configs/device_server.yaml ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ strategy: ddp
3
+ # strategy: ddp_find_unused_parameters_true
4
+
5
+ model:
6
+ init_args:
7
+ workers: &num_workers 48
8
+
9
+ data:
10
+ init_args:
11
+ use_ddp: True
12
+ workers: *num_workers
13
+ batch_size: 12 # 50 / 4 = 12.5
ppsurf/configs/poco.yaml ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ debug: False
2
+ seed_everything: 42
3
+
4
+ trainer:
5
+ max_epochs: 150
6
+ default_root_dir: 'models/poco'
7
+ strategy: auto
8
+ accelerator: gpu
9
+ devices: -1
10
+ precision: 16-mixed
11
+ num_sanity_val_steps: 0
12
+ log_every_n_steps: 1
13
+ logger:
14
+ class_path: pytorch_lightning.loggers.TensorBoardLogger
15
+ init_args:
16
+ save_dir: 'models'
17
+ callbacks:
18
+ - class_path: source.cli.PPSProgressBar
19
+ - class_path: LearningRateMonitor
20
+ init_args:
21
+ logging_interval: step
22
+ - class_path: ModelCheckpoint
23
+ init_args:
24
+ save_last: True
25
+ save_top_k: 0
26
+
27
+ data:
28
+ class_path: source.poco_data_loader.PocoDataModule
29
+ init_args:
30
+ use_ddp: False
31
+ in_file: datasets/abc_train/testset.txt
32
+ padding_factor: 0.05
33
+ seed: 42
34
+ manifold_points: 10000
35
+ patches_per_shape: -1
36
+ do_data_augmentation: True
37
+ batch_size: 10
38
+ workers: 8
39
+
40
+ model:
41
+ class_path: source.poco_model.PocoModel
42
+ init_args:
43
+ output_names:
44
+ - 'imp_surf_sign'
45
+ in_channels: 3
46
+ out_channels: 2
47
+ k: 64
48
+ network_latent_size: 32
49
+ gen_subsample_manifold_iter: 10
50
+ gen_subsample_manifold: 10000
51
+ gen_resolution_global: 257
52
+ rec_batch_size: 50000
53
+ gen_refine_iter: 10
54
+ workers: 8
55
+ lambda_l1: 0.0
56
+ results_dir: 'results'
57
+ name: 'poco'
58
+ debug: False
59
+
60
+ optimizer:
61
+ class_path: torch.optim.AdamW
62
+ init_args:
63
+ lr: 0.001
64
+ betas:
65
+ - 0.9
66
+ - 0.999
67
+ eps: 1e-5
68
+ weight_decay: 1e-2
69
+ amsgrad: False
70
+
71
+ lr_scheduler:
72
+ class_path: torch.optim.lr_scheduler.MultiStepLR
73
+ init_args:
74
+ milestones:
75
+ - 75
76
+ - 125
77
+ gamma: 0.1
ppsurf/configs/poco_mini.yaml ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/poco_mini'
3
+
4
+ model:
5
+ init_args:
6
+ name: 'poco_mini'
7
+ gen_resolution_global: 129 # half resolution
8
+ rec_batch_size: 25000 # half memory
9
+
10
+ data:
11
+ init_args:
12
+ in_file: datasets/abc_minimal/testset.txt # small dataset
13
+ batch_size: 10 # 16 GB GPU memory
ppsurf/configs/ppsurf.yaml ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/ppsurf'
3
+
4
+ model:
5
+ class_path: source.ppsurf_model.PPSurfModel
6
+ init_args:
7
+ network_latent_size: 256
8
+ num_pts_local: 50
9
+ pointnet_latent_size: 256
10
+ debug: False
11
+
12
+ data:
13
+ class_path: source.ppsurf_data_loader.PPSurfDataModule
ppsurf/configs/ppsurf_100nn.yaml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/ppsurf_100nn'
3
+
4
+ model:
5
+ init_args:
6
+ name: 'ppsurf_100nn'
7
+ num_pts_local: 100
8
+
ppsurf/configs/ppsurf_10nn.yaml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/ppsurf_10nn'
3
+
4
+ model:
5
+ init_args:
6
+ name: 'ppsurf_10nn'
7
+ num_pts_local: 10
8
+
ppsurf/configs/ppsurf_200nn.yaml ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/ppsurf_200nn'
3
+
4
+ model:
5
+ init_args:
6
+ name: 'ppsurf_200nn'
7
+ num_pts_local: 200
8
+ rec_batch_size: 25000
9
+
ppsurf/configs/ppsurf_25nn.yaml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/ppsurf_25nn'
3
+
4
+ model:
5
+ init_args:
6
+ name: 'ppsurf_25nn'
7
+ num_pts_local: 25
8
+
ppsurf/configs/ppsurf_50nn.yaml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/ppsurf_50nn'
3
+
4
+ model:
5
+ init_args:
6
+ name: 'ppsurf_50nn'
7
+ num_pts_local: 50
8
+
ppsurf/configs/ppsurf_mini.yaml ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ default_root_dir: 'models/ppsurf_mini'
3
+
4
+ model:
5
+ init_args:
6
+ name: 'ppsurf_mini'
7
+ gen_resolution_global: 129 # half resolution
8
+ rec_batch_size: 25000 # half memory
9
+
10
+ data:
11
+ init_args:
12
+ in_file: datasets/abc_minimal/testset.txt # small dataset
13
+ batch_size: 10 # 16 GB GPU memory
ppsurf/configs/profiler.yaml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ trainer:
2
+ profiler:
3
+ class_path: source.cli.PPSProfiler
4
+ init_args:
5
+ export_to_chrome: False
6
+ emit_nvtx: False
7
+ with_stack: False
8
+
ppsurf/datasets/abc_minimal/03_meshes/00010009_d97409455fa543b3a224250f_trimesh_000.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d0e0aae89613fc83b37e886a060da3d2b224ee7f6788a9fdc85127a3fa2d5ae
3
+ size 366326
ppsurf/datasets/abc_minimal/03_meshes/00010039_75f31cb4dff84986aadc622b_trimesh_000.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aedcc0a61e16c8af1c3972bb9fd455107aa6bbaa4cc970e5cb6192465534fd8c
3
+ size 145139
ppsurf/datasets/abc_minimal/03_meshes/00010045_75f31cb4dff84986aadc622b_trimesh_006.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c4df8a5d100893dc2f941101ef2a726ae5f36c0673b591b5f4e5404d575a1a63
3
+ size 104689
ppsurf/datasets/abc_minimal/03_meshes/00010071_493cf58028d24a5b97528c11_trimesh_001.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:141c70ef2dec82550a4f71565e80d7d1dafe44745ab1b9a139a4fbe49096a500
3
+ size 119909
ppsurf/datasets/abc_minimal/03_meshes/00010074_493cf58028d24a5b97528c11_trimesh_004.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c7a6526052d923223c335a254f96e6ea197a583a79afbd31a89e273e0d8616df
3
+ size 58733
ppsurf/datasets/abc_minimal/03_meshes/00010089_5ae1ee45b583467fa009adc4_trimesh_000.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2f0beae183bc8a3251af07f9bc4b431eb5dd89a95261704a11b3a2df84ac7268
3
+ size 176173
ppsurf/datasets/abc_minimal/03_meshes/00010098_1f6110e499fb41c582c50527_trimesh_001.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:376e1efe00155feed1f1e1dbd49d56157beaa5428e1000525b2ab405de784de1
3
+ size 88089
ppsurf/datasets/abc_minimal/03_meshes/00011084_fddd53ce45f640f3ab922328_trimesh_019.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b8c9009b48a4a61fbb7e5180d6876a4ee4df202cc5823fa2ca93a001d3df380a
3
+ size 307116
ppsurf/datasets/abc_minimal/03_meshes/00016513_3d6966cd42eb44ab8f4224f2_trimesh_053.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5eebfc53813899cdbad4e05a02ff39a4a2472f0d23b83c74acc225d57d595f43
3
+ size 100913
ppsurf/datasets/abc_minimal/03_meshes/00994122_57d9d4755722f9d2d7436f0a_trimesh_000.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:65a87b1b08c4e75beea98e47786c8d33c61d9a07d78197627ad0c17f808a4851
3
+ size 54681
ppsurf/datasets/abc_minimal/04_pts_vis/00010009_d97409455fa543b3a224250f_trimesh_000.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2b5f040b9396998a99f70f597b771e5e4d62acd03c3201784e51c3ba5506e62c
3
+ size 720587
ppsurf/datasets/abc_minimal/04_pts_vis/00010039_75f31cb4dff84986aadc622b_trimesh_000.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6f821d95c3185e42274707c1f19387d018d89d94f9191e498f14680a5ce5e756
3
+ size 83446
ppsurf/datasets/abc_minimal/04_pts_vis/00010045_75f31cb4dff84986aadc622b_trimesh_006.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:db0d39e6373d6052b3344fe11f983e01cdd857c683051f1ee0bb88d17dd0cf53
3
+ size 230699
ppsurf/datasets/abc_minimal/04_pts_vis/00010071_493cf58028d24a5b97528c11_trimesh_001.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e96621d17d9d10535d5149f9560a43701f701ab2856381f297886bbaa6c78944
3
+ size 807491
ppsurf/datasets/abc_minimal/04_pts_vis/00010074_493cf58028d24a5b97528c11_trimesh_004.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8456c94b3dd6060f7406387063b8e47c60a47eca0582bdf1873e089a17d88a09
3
+ size 152183
ppsurf/datasets/abc_minimal/04_pts_vis/00010089_5ae1ee45b583467fa009adc4_trimesh_000.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:acfe34a146d7400566157045626b4cec26b6fb11e95448c3925b67b6330eca33
3
+ size 137495
ppsurf/datasets/abc_minimal/04_pts_vis/00010098_1f6110e499fb41c582c50527_trimesh_001.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5e367d04a8af767eb902da9e4047ba41db080d8dbcea958781e2b22e8259f90c
3
+ size 147875
ppsurf/datasets/abc_minimal/04_pts_vis/00011084_fddd53ce45f640f3ab922328_trimesh_019.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dff0fca9f7e9cebfcea4142bafd860ca0ecb7a916a852d2f96e99f4c2d35b06d
3
+ size 719963
ppsurf/datasets/abc_minimal/04_pts_vis/00016513_3d6966cd42eb44ab8f4224f2_trimesh_053.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8ddfcb74e2a20c3086116bc18dd7cfc6501585e2fa483b6431997150cac9685
3
+ size 1039991
ppsurf/datasets/abc_minimal/04_pts_vis/00994122_57d9d4755722f9d2d7436f0a_trimesh_000.xyz.ply ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5cf81d7822dcabbf89d5699929670a0c3fdfa89041a2bad88114bad6c7a5b08
3
+ size 416531
ppsurf/datasets/abc_minimal/05_query_dist/00010009_d97409455fa543b3a224250f_trimesh_000.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e5ac5cbe1f8a37ceac430dbca46372641835264bb9705f3303756413297ef9b4
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00010039_75f31cb4dff84986aadc622b_trimesh_000.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:91f79d74bc3dca2b959c5569fd8099578ae5381570151a72030549ee36b91748
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00010045_75f31cb4dff84986aadc622b_trimesh_006.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:de26d05ad83c10705ad8e1125f2141b9b99decc431c7503f6b174b40a39f9d1b
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00010071_493cf58028d24a5b97528c11_trimesh_001.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b8a931c4ef1917b02a68c9c226a208d940e4f92dcec64a1d5d5602377b6b7634
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00010074_493cf58028d24a5b97528c11_trimesh_004.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:35f5ae18831517514f8d2e4a3ae6f9d9ca684d09775ed6a4b5ce1cd2ef2622f1
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00010089_5ae1ee45b583467fa009adc4_trimesh_000.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b36dc2750703fb29a23dc19b3d3862020b8d31c293d5a50a8a117f1a6fa8be43
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00010098_1f6110e499fb41c582c50527_trimesh_001.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:33a7ada18b00f5859b72961d9eec834735efa7657bcb649dc6f2f6dd90438eb4
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00011084_fddd53ce45f640f3ab922328_trimesh_019.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:db4342b68b37276a04473869b173ed022c3a760771e0dfe2defcc6595ecf2818
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00016513_3d6966cd42eb44ab8f4224f2_trimesh_053.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67abf7b61b07dfd4cf9b36a3419015e62d40c06d2134a27ffe5f38a28c152178
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_dist/00994122_57d9d4755722f9d2d7436f0a_trimesh_000.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d987baa6647b4683bea3693263658eb35cc9a0324eae0a9782bbb9a75511ea76
3
+ size 8128
ppsurf/datasets/abc_minimal/05_query_pts/00010009_d97409455fa543b3a224250f_trimesh_000.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cbf87f437ca8fbd595c7fd6357f0eb3922998fa142f50023ff5864be96787ad5
3
+ size 24128
ppsurf/datasets/abc_minimal/05_query_pts/00010039_75f31cb4dff84986aadc622b_trimesh_000.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:572874bc63bdda88b5afd0c5dd3c0d7dbeb276db8ca63f3bf065e23bb09ef8c5
3
+ size 24128
ppsurf/datasets/abc_minimal/05_query_pts/00010045_75f31cb4dff84986aadc622b_trimesh_006.ply.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:14d224b89581c3c000f6b8963b0991fcb54f5ac1b1fe245e7f74fa52b852d9c2
3
+ size 24128