Reverb committed on
Commit
b239c75
1 Parent(s): 98cfd78

Upload 123 files

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -35
  2. .gitignore +129 -0
  3. LICENSE +201 -0
  4. README.md +32 -7
  5. __pycache__/app.cpython-311.pyc +0 -0
  6. app.py +240 -0
  7. features_extraction.py +44 -0
  8. flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/4f98da9ec0f689294825687b767e5bd6abddd769/image.png +0 -0
  9. flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/51731869eb38daa90d2c38be3879322cffd0e5a8/image.png +0 -0
  10. flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/873bcc5d4e101e2df7ec1db2a29b17dc0b13f072/image.png +0 -0
  11. flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/captions.json +1 -0
  12. flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/cc007a660fb213a4a5ca9b43229bc21f28fa5792/image.png +0 -0
  13. flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/eb1653eec598be50fd369b3acc91857aa20a6c78/image.png +0 -0
  14. flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/00d27721ded93f7833b7b8ccc2d98bd113c8f73b/image.png +0 -0
  15. flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/1f6730d26ffc4df0fea7cf4d3d8e8f2f6c6521cd/image.png +0 -0
  16. flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/9b260c660faf1bf2cf4485aa65897526cc03d2d3/image.png +0 -0
  17. flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/captions.json +1 -0
  18. flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/cf2a6f967f1557bc9ca891594273f74ef7347f5d/image.png +0 -0
  19. flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/dab41f873239f5a039d53fe0d6c1d4f1f0c3034e/image.png +0 -0
  20. flagged/log.csv +3 -0
  21. photos/--2IBUMom1I.jpg +0 -0
  22. photos/--6JlGcHl-w.jpg +0 -0
  23. photos/--Jy_8mvs4E.jpg +0 -0
  24. photos/--SDX4KWIbA.jpg +0 -0
  25. photos/--Tn3E5ZtfQ.jpg +0 -0
  26. photos/--e3kJUMSZw.jpg +0 -0
  27. photos/--kGuWTwn48.jpg +0 -0
  28. photos/--lzOIJ-a4U.jpg +0 -0
  29. photos/-0YZgPxq04k.jpg +0 -0
  30. photos/-0_ww2ACIw8.jpg +0 -0
  31. photos/-0eINgEiNw4.jpg +0 -0
  32. photos/-12cgSu9HW0.jpg +0 -0
  33. photos/-1a83VD65ss.jpg +0 -0
  34. photos/-1lMrIXAn6Q.jpg +0 -0
  35. photos/-1qb8SIBzKY.jpg +0 -0
  36. photos/-2ii0_ctxpQ.jpg +0 -0
  37. photos/-2loC3xzDF8.jpg +0 -0
  38. photos/-2pFSIxX9ow.jpg +0 -0
  39. photos/-3IZERJGsm4.jpg +0 -0
  40. photos/-3LtGq_RPcY.jpg +0 -0
  41. photos/-3cTY-Q6k88.jpg +0 -0
  42. photos/-3l6KX8uCAM.jpg +0 -0
  43. photos/-3qSsolbivo.jpg +0 -0
  44. photos/-3uIUqsR-Rw.jpg +0 -0
  45. photos/-43qvNitz5k.jpg +0 -0
  46. photos/-4AR-vVjAbM.jpg +0 -0
  47. photos/-4UwhAr4KYg.jpg +0 -0
  48. photos/-4WLn9giArE.jpg +0 -0
  49. photos/-4qCLz3r1s8.jpg +0 -0
  50. photos/-5WWw6DeQ8w.jpg +0 -0
.gitattributes CHANGED
@@ -1,35 +1 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
1
+ unsplash-25k-photos-embeddings.pkl filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
.gitignore ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ pip-wheel-metadata/
24
+ share/python-wheels/
25
+ *.egg-info/
26
+ .installed.cfg
27
+ *.egg
28
+ MANIFEST
29
+
30
+ # PyInstaller
31
+ # Usually these files are written by a python script from a template
32
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
33
+ *.manifest
34
+ *.spec
35
+
36
+ # Installer logs
37
+ pip-log.txt
38
+ pip-delete-this-directory.txt
39
+
40
+ # Unit test / coverage reports
41
+ htmlcov/
42
+ .tox/
43
+ .nox/
44
+ .coverage
45
+ .coverage.*
46
+ .cache
47
+ nosetests.xml
48
+ coverage.xml
49
+ *.cover
50
+ *.py,cover
51
+ .hypothesis/
52
+ .pytest_cache/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ target/
76
+
77
+ # Jupyter Notebook
78
+ .ipynb_checkpoints
79
+
80
+ # IPython
81
+ profile_default/
82
+ ipython_config.py
83
+
84
+ # pyenv
85
+ .python-version
86
+
87
+ # pipenv
88
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
90
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
91
+ # install all needed dependencies.
92
+ #Pipfile.lock
93
+
94
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95
+ __pypackages__/
96
+
97
+ # Celery stuff
98
+ celerybeat-schedule
99
+ celerybeat.pid
100
+
101
+ # SageMath parsed files
102
+ *.sage.py
103
+
104
+ # Environments
105
+ .env
106
+ .venv
107
+ env/
108
+ venv/
109
+ ENV/
110
+ env.bak/
111
+ venv.bak/
112
+
113
+ # Spyder project settings
114
+ .spyderproject
115
+ .spyproject
116
+
117
+ # Rope project settings
118
+ .ropeproject
119
+
120
+ # mkdocs documentation
121
+ /site
122
+
123
+ # mypy
124
+ .mypy_cache/
125
+ .dmypy.json
126
+ dmypy.json
127
+
128
+ # Pyre type checker
129
+ .pyre/
LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
README.md CHANGED
@@ -1,13 +1,38 @@
1
  ---
2
- title: Embrace Vision
3
- emoji: 🏢
4
- colorFrom: yellow
5
- colorTo: purple
6
  sdk: gradio
7
- sdk_version: 3.50.2
8
  app_file: app.py
9
  pinned: false
10
- license: apache-2.0
11
  ---
12
 
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ title: CLIP Image Search
3
+ emoji: 📸
4
+ colorFrom: pink
5
+ colorTo: pink
6
  sdk: gradio
 
7
  app_file: app.py
8
  pinned: false
9
+ python_version: 3.10.10
10
  ---
11
 
12
+ # Configuration
13
+
14
+ `title`: _string_
15
+ Display title for the Space
16
+
17
+ `emoji`: _string_
18
+ Space emoji (emoji-only character allowed)
19
+
20
+ `colorFrom`: _string_
21
+ Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
22
+
23
+ `colorTo`: _string_
24
+ Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
25
+
26
+ `sdk`: _string_
27
+ Can be either `gradio` or `streamlit`
28
+
29
+ `sdk_version` : _string_
30
+ Only applicable for `streamlit` SDK.
31
+ See [doc](https://hf.co/docs/hub/spaces) for more info on supported versions.
32
+
33
+ `app_file`: _string_
34
+ Path to your main application file (which contains either `gradio` or `streamlit` Python code).
35
+ Path is relative to the root of the repository.
36
+
37
+ `pinned`: _boolean_
38
+ Whether the Space stays on top of your list.
__pycache__/app.cpython-311.pyc ADDED
Binary file (13.9 kB). View file
 
app.py ADDED
@@ -0,0 +1,240 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import concurrent.futures
import io
import os
import pickle
import warnings

import gradio as gr
import numpy as np
import torch
from PIL import Image, ImageEnhance
from sentence_transformers import SentenceTransformer, util
from transformers import CLIPProcessor, CLIPModel, CLIPTokenizer, BlipProcessor, BlipForConditionalGeneration, pipeline
11
+
12
+ warnings.filterwarnings("ignore")
13
+
14
+
15
class CLIPModelHandler:
    """Text-to-image and image-to-image semantic search over precomputed
    Unsplash photo embeddings, using a CLIP checkpoint."""

    def __init__(self, model_name):
        self.model_name = model_name
        # Model/processor/tokenizer are created lazily and cached, so
        # repeated searches do not re-instantiate them (the original
        # reloaded all three on every call).
        self._model = None
        self._processor = None
        self._tokenizer = None
        self.img_names, self.img_emb = self.load_precomputed_embeddings()

    def load_precomputed_embeddings(self):
        """Load (img_names, img_emb) from the pickle produced offline.

        NOTE(review): pickle.load on a repo-shipped file — trusted here,
        but never point this at untrusted data.
        """
        emb_filename = 'unsplash-25k-photos-embeddings.pkl'
        with open(emb_filename, 'rb') as fIn:
            img_names, img_emb = pickle.load(fIn)
        return img_names, img_emb

    def _ensure_model(self):
        # One-time lazy initialisation of the CLIP components.
        if self._model is None:
            self._model = CLIPModel.from_pretrained(self.model_name)
            self._processor = CLIPProcessor.from_pretrained(self.model_name)
            self._tokenizer = CLIPTokenizer.from_pretrained(self.model_name)

    def search_text(self, query, top_k=1):
        """Return the top_k PIL images whose embeddings best match `query`."""
        self._ensure_model()
        inputs = self._tokenizer([query], padding=True, return_tensors="pt")
        query_emb = self._model.get_text_features(**inputs)
        hits = util.semantic_search(query_emb, self.img_emb, top_k=top_k)[0]
        return [Image.open(os.path.join("photos/", self.img_names[hit['corpus_id']]))
                for hit in hits]

    def search_image(self, image_path, top_k=1):
        """Return the top_k PIL images most similar to the image at `image_path`."""
        self._ensure_model()
        image = Image.open(image_path)
        inputs = self._processor(images=image, return_tensors="pt")
        # BUG FIX: the original called model(**inputs) with image-only inputs
        # and read .logits_per_image; CLIPModel's forward requires text inputs
        # as well, so that path raises. Use the image embedding directly.
        image_emb = self._model.get_image_features(**inputs)
        hits = util.semantic_search(image_emb, self.img_emb, top_k=top_k)[0]
        return [Image.open(os.path.join("photos/", self.img_names[hit['corpus_id']]))
                for hit in hits]
60
+
61
class BLIPImageCaptioning:
    """Unconditional image captioning with a BLIP checkpoint."""

    def __init__(self, blip_model_name):
        self.blip_model_name = blip_model_name
        # Loaded lazily on first caption request (the original reloaded the
        # model on every call).
        self._model = None
        self._processor = None
        # BUG FIX: the original hard-coded "cuda", crashing on CPU-only hosts.
        self._device = "cuda" if torch.cuda.is_available() else "cpu"

    def preprocess_image(self, image):
        """Accept a file path or a numpy array; return an RGB PIL image.

        Raises ValueError for any other input type.
        """
        if isinstance(image, str):
            return Image.open(image).convert('RGB')
        elif isinstance(image, np.ndarray):
            return Image.fromarray(np.uint8(image)).convert('RGB')
        else:
            raise ValueError("Invalid input type for image. Supported types: str (file path) or np.ndarray.")

    def _ensure_model(self):
        # One-time lazy initialisation of the BLIP components.
        if self._model is None:
            self._model = BlipForConditionalGeneration.from_pretrained(self.blip_model_name).to(self._device)
            self._processor = BlipProcessor.from_pretrained(self.blip_model_name)

    def generate_caption(self, image):
        """Return an unconditional caption string for `image`.

        On failure returns {"error": message} instead of raising, matching
        the original best-effort contract (one bad image must not abort a
        batch run in generate_captions_parallel).
        """
        try:
            self._ensure_model()
            raw_image = self.preprocess_image(image)
            inputs = self._processor(raw_image, return_tensors="pt").to(self._device)
            out = self._model.generate(**inputs)
            return self._processor.decode(out[0], skip_special_tokens=True)
        except Exception as e:
            return {"error": str(e)}

    def generate_captions_parallel(self, images):
        """Caption several images via a thread pool; returns results in order."""
        with concurrent.futures.ThreadPoolExecutor() as executor:
            return list(executor.map(self.generate_caption, images))
92
+
93
+
94
# --- Model/pipeline initialisation (runs once at import time) ---

# Handler serving text-to-image search over the precomputed embeddings.
clip_handler = CLIPModelHandler("openai/clip-vit-base-patch32")

# Zero-shot image classification pipeline backed by the same CLIP checkpoint.
clip_classifier = pipeline("zero-shot-image-classification",
                           model="openai/clip-vit-base-patch32")

# BLIP captioning checkpoint name, plus a preloaded processor.
blip_model_name = "Salesforce/blip-image-captioning-base"
blip_processor = BlipProcessor.from_pretrained(blip_model_name)
103
+
104
# Function for text-to-image search
def text_to_image_interface(query, top_k):
    """Gradio handler: return (resized result images, per-result info).

    query: free-text search string; top_k: number of results to return.
    """
    try:
        # Perform text-to-image search.
        result_images = clip_handler.search_text(query, top_k)

        # Resize images to a uniform size before displaying.
        result_images_resized = [image.resize((224, 224)) for image in result_images]

        # BUG FIX: the original built result_info from every entry of
        # clip_handler.img_names (~25k items) instead of just the hits.
        # Image.open() sets .filename when given a path, so report those
        # (use the pre-resize images — resize() drops the attribute).
        result_info = [{"Image Name": os.path.basename(getattr(img, "filename", ""))}
                       for img in result_images]

        return result_images_resized, result_info
    except Exception as e:
        # gr.Error must be raised, not returned, for Gradio to display it.
        raise gr.Error(f"Error in text-to-image search: {str(e)}")
119
+
120
+
121
# Gradio Interface function for zero-shot classification
def zero_shot_classification(image, labels_text):
    """Classify `image` (numpy array) against comma-separated candidate labels.

    Returns {label: score} for display in a gr.Label component.
    """
    try:
        # Convert image to PIL format.
        PIL_image = Image.fromarray(np.uint8(image)).convert('RGB')

        # BUG FIX: strip whitespace and drop empty entries, so input like
        # "cat, dog" does not produce the candidate label " dog".
        labels = [label.strip() for label in labels_text.split(",") if label.strip()]

        # Perform zero-shot classification.
        res = clip_classifier(images=PIL_image, candidate_labels=labels,
                              hypothesis_template="This is a photo of a {}")

        # Map each candidate label to its score.
        return {dic["label"]: dic["score"] for dic in res}
    except Exception as e:
        # gr.Error must be raised, not returned, for Gradio to display it.
        raise gr.Error(f"Error in zero-shot classification: {str(e)}")
139
+
140
+
141
+
142
def preprocessing_interface(original_image, brightness_slider, contrast_slider, saturation_slider, sharpness_slider, rotation_slider):
    """Rotate an image and adjust brightness/contrast/saturation/sharpness.

    Slider values are percentages: 100 maps to enhancement factor 1.0
    (unchanged), 0 maps to 0.0. NOTE(review): with this mapping sliders can
    only attenuate, never boost — confirm the intended range with the UI.
    Returns the processed PIL image.
    """
    try:
        # Convert NumPy array to an RGB PIL Image, then rotate.
        PIL_image = Image.fromarray(np.uint8(original_image)).convert('RGB')
        PIL_image = PIL_image.rotate(rotation_slider)

        # Apply each enhancement with its normalised factor (value / 100).
        adjustments = (
            (ImageEnhance.Brightness, brightness_slider),
            (ImageEnhance.Contrast, contrast_slider),
            (ImageEnhance.Color, saturation_slider),
            (ImageEnhance.Sharpness, sharpness_slider),
        )
        for enhancer_cls, slider_value in adjustments:
            PIL_image = enhancer_cls(PIL_image).enhance(slider_value / 100.0)

        return PIL_image
    except Exception as e:
        # gr.Error must be raised, not returned, for Gradio to display it.
        raise gr.Error(f"Error in preprocessing: {str(e)}")
177
+
178
def generate_captions(images):
    """Caption each image in `images` with BLIP; returns a list of captions.

    BUG FIX: the original referenced an undefined name
    `blip_model_instance` (NameError at call time) and loaded a BLIP model
    and processor it never used.
    """
    captioner = BLIPImageCaptioning(blip_model_name)
    return [captioner.generate_caption(image) for image in images]
183
+
184
+
185
# --- Gradio interface wiring ---
# Interface variables are suffixed _ui so they no longer shadow the handler
# functions of the same names (the original rebound text_to_image_interface
# and preprocessing_interface to gr.Interface objects).

zero_shot_classification_ui = gr.Interface(
    fn=zero_shot_classification,
    inputs=[
        gr.Image(label="Image Query", elem_id="image_input"),
        gr.Textbox(label="Labels (comma-separated)", elem_id="labels_input"),
    ],
    outputs=gr.Label(elem_id="label_image"),
)

text_to_image_ui = gr.Interface(
    fn=text_to_image_interface,
    inputs=[
        gr.Textbox(
            lines=2,
            label="Text Query",
            placeholder="Enter text here...",
        ),
        gr.Slider(0, 5, step=1, label="Top K Results"),
    ],
    outputs=[
        gr.Gallery(
            label="Text-to-Image Search Results",
            elem_id="gallery_text",
            # BUG FIX: `grid_cols` is not a gr.Gallery argument; the layout
            # keyword is `columns`.
            columns=2,
            height="auto",
        ),
        gr.Text(label="Result Information", elem_id="text_info"),
    ],
)

# Instantiate the BLIP captioner once and reuse it for both interfaces below.
blip_model = BLIPImageCaptioning(blip_model_name)
blip_captioning_ui = gr.Interface(
    fn=blip_model.generate_caption,
    inputs=gr.Image(label="Image for Captioning", elem_id="blip_caption_image"),
    # BUG FIX: `default` is not a gr.Textbox argument; the initial-content
    # keyword is `value`.
    outputs=gr.Textbox(label="Generated Captions", elem_id="blip_generated_captions", value=""),
)

preprocessing_ui = gr.Interface(
    fn=blip_model.preprocess_image,
    inputs=[
        gr.Image(label="Original Image", elem_id="original_image"),
    ],
    outputs=[
        gr.Image(label="Processed Image", elem_id="processed_image"),
    ],
)

# Tabbed interface. NOTE(review): preprocessing_ui is built but deliberately
# left out of the tab list, matching the original behaviour.
app = gr.TabbedInterface(
    interface_list=[text_to_image_ui, zero_shot_classification_ui, blip_captioning_ui],
    tab_names=["Text-to-Image Search", "Zero-Shot Classification", "BLIP Image Captioning"],
)

# Launch the Gradio interface.
# BUG FIX: `share` expects a boolean; the original passed the string "true".
app.launch(debug=True, share=True)
features_extraction.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import torch
from torchvision import models, transforms
from PIL import Image
import pickle
import os
from tqdm import tqdm  # progress bar for the extraction loop

# Load a pretrained ResNet-50 in inference mode.
# NOTE(review): `pretrained=True` is deprecated in newer torchvision in
# favour of `weights=...` — update when the pinned version allows.
model = models.resnet50(pretrained=True)
model = model.eval()

# Standard ImageNet preprocessing pipeline.
preprocess = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
])


def extract_features(image_path):
    """Return the model's output for one image as a 1-D numpy array."""
    image = Image.open(image_path).convert('RGB')
    input_batch = preprocess(image).unsqueeze(0)  # add batch dimension

    with torch.no_grad():
        output = model(input_batch)

    return output.squeeze().numpy()


# Directory containing your images.
images_directory = "photos/"

# Process each image and collect features keyed by filename.
image_features = {}
for filename in tqdm(os.listdir(images_directory), desc="Processing Images"):
    # BUG FIX: case-insensitive extension check so ".JPG"/".PNG" files are
    # not silently skipped.
    if filename.lower().endswith((".jpg", ".png")):
        image_path = os.path.join(images_directory, filename)
        image_features[filename] = extract_features(image_path)

# Save the features to a pickle file.
output_file = "unsplash-25k-embeddings.pkl"
with open(output_file, 'wb') as f:
    pickle.dump(image_features, f)
flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/4f98da9ec0f689294825687b767e5bd6abddd769/image.png ADDED
flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/51731869eb38daa90d2c38be3879322cffd0e5a8/image.png ADDED
flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/873bcc5d4e101e2df7ec1db2a29b17dc0b13f072/image.png ADDED
flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/captions.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\28b9820a-4b94-4951-8cde-6128980d10c6\\4f98da9ec0f689294825687b767e5bd6abddd769\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\28b9820a-4b94-4951-8cde-6128980d10c6\\51731869eb38daa90d2c38be3879322cffd0e5a8\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\28b9820a-4b94-4951-8cde-6128980d10c6\\873bcc5d4e101e2df7ec1db2a29b17dc0b13f072\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\28b9820a-4b94-4951-8cde-6128980d10c6\\eb1653eec598be50fd369b3acc91857aa20a6c78\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\28b9820a-4b94-4951-8cde-6128980d10c6\\cc007a660fb213a4a5ca9b43229bc21f28fa5792\\image.png": null}
flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/cc007a660fb213a4a5ca9b43229bc21f28fa5792/image.png ADDED
flagged/Text-to-Image Search Results/28b9820a-4b94-4951-8cde-6128980d10c6/eb1653eec598be50fd369b3acc91857aa20a6c78/image.png ADDED
flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/00d27721ded93f7833b7b8ccc2d98bd113c8f73b/image.png ADDED
flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/1f6730d26ffc4df0fea7cf4d3d8e8f2f6c6521cd/image.png ADDED
flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/9b260c660faf1bf2cf4485aa65897526cc03d2d3/image.png ADDED
flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/captions.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\578ec4d5-4c30-46d9-99b2-de30a7bf78d0\\00d27721ded93f7833b7b8ccc2d98bd113c8f73b\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\578ec4d5-4c30-46d9-99b2-de30a7bf78d0\\cf2a6f967f1557bc9ca891594273f74ef7347f5d\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\578ec4d5-4c30-46d9-99b2-de30a7bf78d0\\9b260c660faf1bf2cf4485aa65897526cc03d2d3\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\578ec4d5-4c30-46d9-99b2-de30a7bf78d0\\dab41f873239f5a039d53fe0d6c1d4f1f0c3034e\\image.png": null, "C:\\Users\\basel\\OneDrive\\Desktop\\CLIP-Model-Image-Search\\flagged\\Text-to-Image Search Results\\578ec4d5-4c30-46d9-99b2-de30a7bf78d0\\1f6730d26ffc4df0fea7cf4d3d8e8f2f6c6521cd\\image.png": null}
flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/cf2a6f967f1557bc9ca891594273f74ef7347f5d/image.png ADDED
flagged/Text-to-Image Search Results/578ec4d5-4c30-46d9-99b2-de30a7bf78d0/dab41f873239f5a039d53fe0d6c1d4f1f0c3034e/image.png ADDED
flagged/log.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ Text Query,Top K Results,Text-to-Image Search Results,flag,username,timestamp
2
+ man riding a horse,5,C:\Users\basel\OneDrive\Desktop\CLIP-Model-Image-Search\flagged\Text-to-Image Search Results\578ec4d5-4c30-46d9-99b2-de30a7bf78d0,,,2023-10-27 19:01:31.140298
3
+ man riding a horse,5,C:\Users\basel\OneDrive\Desktop\CLIP-Model-Image-Search\flagged\Text-to-Image Search Results\28b9820a-4b94-4951-8cde-6128980d10c6,,,2023-10-27 19:01:36.052210
photos/--2IBUMom1I.jpg ADDED
photos/--6JlGcHl-w.jpg ADDED
photos/--Jy_8mvs4E.jpg ADDED
photos/--SDX4KWIbA.jpg ADDED
photos/--Tn3E5ZtfQ.jpg ADDED
photos/--e3kJUMSZw.jpg ADDED
photos/--kGuWTwn48.jpg ADDED
photos/--lzOIJ-a4U.jpg ADDED
photos/-0YZgPxq04k.jpg ADDED
photos/-0_ww2ACIw8.jpg ADDED
photos/-0eINgEiNw4.jpg ADDED
photos/-12cgSu9HW0.jpg ADDED
photos/-1a83VD65ss.jpg ADDED
photos/-1lMrIXAn6Q.jpg ADDED
photos/-1qb8SIBzKY.jpg ADDED
photos/-2ii0_ctxpQ.jpg ADDED
photos/-2loC3xzDF8.jpg ADDED
photos/-2pFSIxX9ow.jpg ADDED
photos/-3IZERJGsm4.jpg ADDED
photos/-3LtGq_RPcY.jpg ADDED
photos/-3cTY-Q6k88.jpg ADDED
photos/-3l6KX8uCAM.jpg ADDED
photos/-3qSsolbivo.jpg ADDED
photos/-3uIUqsR-Rw.jpg ADDED
photos/-43qvNitz5k.jpg ADDED
photos/-4AR-vVjAbM.jpg ADDED
photos/-4UwhAr4KYg.jpg ADDED
photos/-4WLn9giArE.jpg ADDED
photos/-4qCLz3r1s8.jpg ADDED
photos/-5WWw6DeQ8w.jpg ADDED