Update app.py
app.py
CHANGED
@@ -1,33 +1,250 @@
-
-
-
 import streamlit as st
 import numpy as np
+import sys
+
+import time
+import math
 import pandas as pd
-import matplotlib.pyplot as plt
-import cv2
 
-
-
 
-#
-
 
-#
-
 
-
-if uploaded_file is not None:
-    query_image = cv2.imread(uploaded_file.name)
-else:
-    query_image = cv2.imread(selected_image)
 
-
-
 
-# Use the similarity search system to find the most similar images
-similar_images = find_similar_images(query_image)
 
-
-
-st.image(image, caption="Similar Image", use_column_width=True)
+from utils import *
+from catalogue_operations import *
+
+#from count_sessions import count_sessions
+#count_sessions()
+
+def main():
+    st.markdown(
+        """
+<style>
+[data-testid="stSidebar"][aria-expanded="true"] > div:first-child {
+    width: 250px;
+}
+[data-testid="stSidebar"][aria-expanded="false"] > div:first-child {
+    width: 250px;
+    margin-left: -250px;
+}
+</style>
+        """,
+        unsafe_allow_html=True,
+    )
+
+    header_cols = st.columns((1))
+    header_cols[0].title("Welcome to Galaxy Finder")
+    header_cols[0].markdown(
+        """
+Created by [George Stein](https://georgestein.github.io/)
+        """)
+
+    display_method = header_cols[-1].button('Interested in learning how this works?')
+    if display_method:
+        describe_method()
+    else:
+        galaxy_search()
+
+
+def describe_method():
+    st.button('Back to Galaxy Finder')
+
+    st.markdown(
+        """
+### A bit about the method:
+- The similarity of two images is quite easy to judge by eye - but writing an algorithm to do the same is not as easy as one might think! This is because as humans we can easily identify and understand what object is in the image.
+- A machine is different - it simply looks at individual pixel values. Yet two images that to us have very similar properties and appearances will likely have vastly different pixel values. For example, imagine rotating a galaxy image by 90 degrees. It is obviously still the same galaxy, but the pixel values have completely changed.
+- So the first step is to teach a computer to understand what is actually in the image on a deeper level than just looking at pixel values. Unfortunately we do not have any information alongside the image specifying what type of galaxy is actually in it - so where do we start?
+- We used a type of machine learning called "self-supervised representation learning" to boil down each image into a concentrated vector of information, or "representation", that encapsulates the appearance and properties of the galaxy.
+- Self-supervised learning works by creating multiple versions of each image which approximate the observational symmetries, errors, and uncertainties within the dataset, such as image rotations, adding noise, blurring it, etc., and then teaching the machine to learn the same representation for all these versions of the same galaxy. In this way, we move beyond looking at pixel values, and teach the machine a deeper understanding of the image.
+- Once we have trained the machine learning model on millions of galaxies we calculate and save the representation of every image in the dataset, and precompute the similarity of any two galaxies. Then, you tell us what galaxy to use as a starting point, we find the representation belonging to the image of that galaxy, compare it to millions of other representations from all the other galaxies, and return the most similar images!
+
+**Please see [our overview paper](https://arxiv.org/abs/2110.13151) for more technical details, or see our recent application of the app to find [strong gravitational lenses](https://arxiv.org/abs/2012.13083) -- some of the rarest and most interesting objects in the universe!**
+
+Dataset:
+
+- We used galaxy images from [DECaLS DR9](https://www.legacysurvey.org/), randomly sampling 3.5 million galaxies to train the machine learning model. We then apply it on every galaxy in the dataset, about 42 million galaxies with z-band magnitude < 20, so most bright things in the sky should be included, with very dim and small objects likely missing - more to come soon!
+- The models were trained using images of size 96 pixels by 96 pixels centered on the galaxy. So features outside of this central region are not used to calculate the similarity, but are sometimes nice to look at.
+
+Please note this project is ongoing, and results will continue to be updated and improved.
+
+Created by [George Stein](https://georgestein.github.io/)
+        """
+    )
+    st.button('Back to Galaxy Finder', key='galaxies') # will change state and hence trigger rerun and hence reset should_tell_me_more
+
+
+def galaxy_search():
+
+    # Hardcode parameter options
+    ra_unit_formats = 'degrees or HH:MM:SS'
+    dec_unit_formats = 'degrees or DD:MM:SS'
+
+    similarity_types = ['most similar', 'least similar']
+
+    # choices for number of images to display
+    num_nearest_vals = [i**2 for i in range(4, 11)]
+
+    # maximum number of similar objects allowed in data table
+    num_nearest_max = 1000
+
+    npix_types = [96, 152, 256]
+
+    model_versions = ['v1', 'v2']
+
+    # don't use galaxies up to this index, as lots can have weird observing errors
+    index_use_min = 2500
+
+    # Read in selected options and run program
+    tstart = time.time()
+
+    with st.sidebar.expander('Instructions'):
+        st.markdown(
+            """
+**Enter the coordinates of your favourite galaxy and we'll search for the most similar looking ones in the universe!**
+
+Click the 'search random galaxy' button, or try finding a cool galaxy at [legacysurvey.org](https://www.legacysurvey.org/viewer)
+- Use the south survey (select the <Legacy Surveys DR9-south images> option). Currently not all galaxies are included, but most bright ones should be.
+            """
+        )
+    #st.sidebar.markdown('### Set up and submit your query!')
+
+    ra_search = st.sidebar.text_input('RA', key='ra',
+                                      help="Right Ascension of query galaxy ({:s})".format(ra_unit_formats),
+                                      value='199.3324')
+    dec_search = st.sidebar.text_input('Dec', key='dec',
+                                       help="Declination of query galaxy ({:s})".format(dec_unit_formats),
+                                       value='20.6382')
+
+    ra_search, dec_search = radec_string_to_degrees(ra_search, dec_search, ra_unit_formats, dec_unit_formats)
+
+    # similarity_option = st.sidebar.selectbox(
+    #     'Want to see the most similar galaxies or the least similar?',
+    #     similarity_types)
+
+    num_nearest = st.sidebar.select_slider('Number of similar galaxies to display', num_nearest_vals)
+
+    npix_show = st.sidebar.select_slider('Image size (pixels)', npix_types, value=npix_types[1])
+
+    model_version = st.sidebar.select_slider('Model version', model_versions, value=model_versions[-1])
+
+    num_similar_query = 1000
+
+    similarity_inv = False
+    #if similarity_option == 'least similar':
+    #    similarity_inv = True
+
+    start_search = st.sidebar.button('Search query')
+    start_search_random = st.sidebar.button('Search random galaxy')
+
+    # load in full datasets needed
+    LC = LoadCatalogue()
+    cat = LC.download_catalogue_files(include_extra_features=True)
+
+    #cat = LC.load_catalogue_coordinates(include_extra_features=True)
+    ngals_tot = cat['ngals_tot']
+
+    # Set up class containing search operations
+    CAT = Catalogue(cat)
+
+
+    # start search when prompted by user
+    if start_search or start_search_random:
+        if start_search_random:
+            # Galaxies are sorted by brightness, so earlier ones are more interesting to look at
+            # Sample with this in mind by using lognormal distribution
+
+            ind_max = ngals_tot-1
+            ind_random = 0
+            while (ind_random < index_use_min) or (ind_random > ind_max):
+                #ind_random = int(np.random.lognormal(10., 2.)) # strongly biased towards bright galaxies
+                ind_random = int(np.random.lognormal(12., 3.)) # biased towards bright galaxies
+
+            radec_random = CAT.load_from_catalogue_indices(include_extra_features=False,
+                                                           inds_load=[ind_random])
+            ra_search = radec_random['ra'][0]
+            dec_search = radec_random['dec'][0]
+
+        # Find index of closest galaxy to search location. This galaxy becomes query
+        CAT.search_catalogue(ra_search, dec_search)
+
+        print('Galaxy index used= ', CAT.query_ind)
+        # Find indexes of similar galaxies to query
+        #st.write('Searching through the brightest {:,} galaxies in the DECaLS survey to find the most similar to your request. More to come soon!'.format(ngals_tot))
+
+        CAT.similarity_search(nnearest=num_similar_query+1,
+                              similarity_inv=similarity_inv,
+                              model_version=model_version) # +1 to include self
+
+        # Get info for similar objects
+        similarity_catalogue = CAT.load_from_catalogue_indices(include_extra_features=True)
+        similarity_catalogue['similarity'] = CAT.similarity_score
+
+        # Get urls from legacy survey
+        urls = urls_from_coordinates(similarity_catalogue, npix=npix_show)
+        similarity_catalogue['url'] = np.array(urls)
+
+        # Plot query image. Put in center columns to ensure it remains centered upon display
+
+        ncolumns = min(11, int(math.ceil(np.sqrt(num_nearest))))
+        nrows = int(math.ceil(num_nearest/ncolumns))
+
+        lab = 'Query galaxy'
+        lab_radec = 'RA, Dec = ({:.4f}, {:.4f})'.format(similarity_catalogue['ra'][0], similarity_catalogue['dec'][0])
+        cols = st.columns([2]+[1*ncolumns])
+        cols[0].subheader(lab)
+        cols[1].subheader('Most similar galaxies')
+
+        cols = st.columns([2]+[1]*ncolumns)
+        cols[0].image(urls[0],
+                      use_column_width='always',
+                      caption=lab_radec)#use_column_width='auto')
+        # plot rest of images in smaller grid format
+
+
+        iimg = 1 # start at 1 as we already included first image above
+        for irow in range(nrows):
+            for icol in range(ncolumns):
+                url = urls[iimg]
+                lab = 'Similarity={:.2f}\n'.format(similarity_catalogue['similarity'][iimg]) #+ lab
+                if ncolumns > 5:
+                    lab = None
+
+                # add image to grid
+                cols[icol+1].image(url, caption=lab, use_column_width='always')
+                iimg += 1
+
+        # convert similarity_catalogue to pandas dataframe to display and download
+        bands = ['g', 'r', 'z']
+
+        similarity_catalogue_out = {} # split > 1D arrays into 1D columns
+        for k, v in similarity_catalogue.items():
+            # assume max dimensionality of 2
+            if v.ndim == 2:
+                for iband in range(v.shape[1]):
+                    similarity_catalogue_out['{:s}_{:s}'.format(k, bands[iband])] = v[:, iband]
+
+            else:
+                similarity_catalogue_out[k] = v
+
+        # convert format of source_type, else does not show properly in table
+        similarity_catalogue_out['source_type'] = similarity_catalogue_out['source_type'].astype('str')
+        df = pd.DataFrame.from_dict(similarity_catalogue_out)

+        # Sort columns to lead with the most useful ones
+        cols_leading = ['ra', 'dec', 'similarity']
+        cols = cols_leading + [col for col in df if col not in cols_leading]
+        df = df[cols]
+
+        # display table
+        st.write(df.head(num_nearest_max))#vals[-1]))

+        # show a downloadable link
+        st.markdown(get_table_download_link(df), unsafe_allow_html=True)

+        tend = time.time()

+st.set_page_config(
+    page_title='Galaxy Finder',
+    ## page_icon='GEORGE',
+    layout="wide",
+    initial_sidebar_state="expanded",
+)


+if __name__ == '__main__':
+    main()
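
The new app.py leans on several helpers imported from utils and catalogue_operations that are not part of this diff. The sketches below are illustrative guesses at what they do, not the actual implementations in this repository. First, radec_string_to_degrees accepts either decimal degrees or colon-separated sexagesimal strings ('HH:MM:SS' for RA, 'DD:MM:SS' for Dec) and returns floats in degrees; a minimal stand-in could look like this:

def radec_to_degrees(ra_str, dec_str):
    # Hypothetical stand-in for utils.radec_string_to_degrees:
    # decimal-degree inputs pass through, colon-separated inputs are converted.
    def parse(value, hours=False):
        if ':' not in value:
            return float(value)
        d, m, s = (float(x) for x in value.split(':'))
        sign = -1.0 if value.strip().startswith('-') else 1.0
        deg = sign * (abs(d) + m / 60.0 + s / 3600.0)
        return deg * 15.0 if hours else deg   # RA given in hours -> degrees
    return parse(ra_str, hours=True), parse(dec_str)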
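
The core of CAT.similarity_search is what the 'A bit about the method' text describes: every galaxy has a precomputed representation vector, and the most similar galaxies are those whose vectors lie closest to the query's. A brute-force version of that lookup, assuming unit-normalised vectors held in a single numpy array (the real Catalogue class works from its own precomputed data), is:

import numpy as np

def most_similar(representations, query_ind, nnearest=10):
    # representations: (ngals, ndim) array of L2-normalised representation vectors.
    # Cosine similarity reduces to a dot product for unit-norm vectors.
    scores = representations @ representations[query_ind]
    order = np.argsort(-scores)[:nnearest]   # best matches first; index 0 is the query itself
    return order, scores[order]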
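
urls_from_coordinates presumably builds one Legacy Survey cutout URL per (ra, dec) entry in the similarity catalogue, sized by npix. Something along these lines would do it, though the exact endpoint and query parameters here are assumptions rather than taken from this repository:

def cutout_urls(catalogue, npix=152):
    # Hypothetical: one JPEG cutout URL per catalogue entry.
    base = 'https://www.legacysurvey.org/viewer/cutout.jpg'
    return ['{}?ra={:.4f}&dec={:.4f}&layer=ls-dr9&size={:d}'.format(base, ra, dec, npix)
            for ra, dec in zip(catalogue['ra'], catalogue['dec'])]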
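
Finally, get_table_download_link, rendered with unsafe_allow_html=True, follows a common Streamlit pattern: serialise the DataFrame to CSV, base64-encode it, and return an HTML anchor with a data URI. A minimal sketch, assuming that pattern:

import base64

def table_download_link(df, filename='similar_galaxies.csv'):
    # Encode the table as a data-URI download link for st.markdown to render.
    b64 = base64.b64encode(df.to_csv(index=False).encode()).decode()
    return '<a href="data:text/csv;base64,{}" download="{}">Download table as CSV</a>'.format(b64, filename)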