introvoyz041 committed on
Commit 56e7fff · verified · 1 parent: 835abeb

Migrated from GitHub

Files changed (35)
  1. .gitattributes +24 -0
  2. data/LICENSE +21 -0
  3. data/data/expansion_factors.csv +3 -0
  4. data/data/registration/post_expansion/example1.tif +3 -0
  5. data/data/registration/post_expansion/example2.tif +3 -0
  6. data/data/registration/pre_expansion/example1.tif +3 -0
  7. data/data/registration/pre_expansion/example2.tif +3 -0
  8. data/data/segmentation_data.csv +3 -0
  9. data/data/trailmap_volumes/seg-example1.tif/seg-example1-1.tif +3 -0
  10. data/data/trailmap_volumes/seg-example1.tif/seg-example1-10.tif +3 -0
  11. data/data/trailmap_volumes/seg-example1.tif/seg-example1-2.tif +3 -0
  12. data/data/trailmap_volumes/seg-example1.tif/seg-example1-3.tif +3 -0
  13. data/data/trailmap_volumes/seg-example1.tif/seg-example1-4.tif +3 -0
  14. data/data/trailmap_volumes/seg-example1.tif/seg-example1-5.tif +3 -0
  15. data/data/trailmap_volumes/seg-example1.tif/seg-example1-6.tif +3 -0
  16. data/data/trailmap_volumes/seg-example1.tif/seg-example1-7.tif +3 -0
  17. data/data/trailmap_volumes/seg-example1.tif/seg-example1-8.tif +3 -0
  18. data/data/trailmap_volumes/seg-example1.tif/seg-example1-9.tif +3 -0
  19. data/data/trailmap_volumes/seg-example2.tif/seg-example2-1.tif +3 -0
  20. data/data/trailmap_volumes/seg-example2.tif/seg-example2-10.tif +3 -0
  21. data/data/trailmap_volumes/seg-example2.tif/seg-example2-2.tif +3 -0
  22. data/data/trailmap_volumes/seg-example2.tif/seg-example2-3.tif +3 -0
  23. data/data/trailmap_volumes/seg-example2.tif/seg-example2-4.tif +3 -0
  24. data/data/trailmap_volumes/seg-example2.tif/seg-example2-5.tif +3 -0
  25. data/data/trailmap_volumes/seg-example2.tif/seg-example2-6.tif +3 -0
  26. data/data/trailmap_volumes/seg-example2.tif/seg-example2-7.tif +3 -0
  27. data/data/trailmap_volumes/seg-example2.tif/seg-example2-8.tif +3 -0
  28. data/data/trailmap_volumes/seg-example2.tif/seg-example2-9.tif +3 -0
  29. data/process_batch.py +43 -0
  30. data/processing/__init__.py +2 -0
  31. data/processing/measure.py +32 -0
  32. data/processing/process.py +31 -0
  33. data/register_batch.py +34 -0
  34. data/registration/__init__.py +1 -0
  35. data/registration/rigid_registration.py +56 -0
.gitattributes CHANGED
@@ -57,3 +57,27 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  # Video files - compressed
  *.mp4 filter=lfs diff=lfs merge=lfs -text
  *.webm filter=lfs diff=lfs merge=lfs -text
+ data/data/registration/post_expansion/example1.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/registration/post_expansion/example2.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/registration/pre_expansion/example1.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/registration/pre_expansion/example2.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-1.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-10.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-2.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-3.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-4.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-5.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-6.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-7.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-8.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example1.tif/seg-example1-9.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-1.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-10.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-2.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-3.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-4.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-5.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-6.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-7.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-8.tif filter=lfs diff=lfs merge=lfs -text
+ data/data/trailmap_volumes/seg-example2.tif/seg-example2-9.tif filter=lfs diff=lfs merge=lfs -text
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2021 Kamran Ahmed
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/data/expansion_factors.csv ADDED
@@ -0,0 +1,3 @@
+ id,expansion_factor
+ example1,4.172762498448865
+ example2,4.234239813334196
data/data/registration/post_expansion/example1.tif ADDED

Git LFS Details

  • SHA256: 214328be16d24b213b2c3f4f0d5f986cd5755a76cbbf5ee6bd7de11c86b00e93
  • Pointer size: 132 Bytes
  • Size of remote file: 2.14 MB
data/data/registration/post_expansion/example2.tif ADDED

Git LFS Details

  • SHA256: 3a633b7118473d2f7bae4f9f2c6b5d15e4a54b8951cc7394b1b2bdf809d7d91a
  • Pointer size: 132 Bytes
  • Size of remote file: 2.14 MB
data/data/registration/pre_expansion/example1.tif ADDED

Git LFS Details

  • SHA256: 1241139dc179a96c9028b8bffea9c78bdd3b4ea6b2837d3dad5ec193ff823eb6
  • Pointer size: 132 Bytes
  • Size of remote file: 2.14 MB
data/data/registration/pre_expansion/example2.tif ADDED

Git LFS Details

  • SHA256: fe0debec43ab7328e2398250ef924e07f9c63e67ffa98c53ab8dea5501a4f4a9
  • Pointer size: 132 Bytes
  • Size of remote file: 2.14 MB
data/data/segmentation_data.csv ADDED
@@ -0,0 +1,3 @@
+ id,image_volume (um3),axon_volume (um3),axon_length (um),avg_axon_radius (um)
+ seg-example1.tif,2192.190448787476,40.14157371180198,676.9529106394549,0.10804224759340286
+ seg-example2.tif,2098.084266317545,97.1325822783973,1589.9739226329862,0.11063189804553986
data/data/trailmap_volumes/seg-example1.tif/seg-example1-1.tif ADDED

Git LFS Details

  • SHA256: cc9aaddae129de92c3f36bfdcbb11aba12a79f98584de9407ea270e516539fd1
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-10.tif ADDED

Git LFS Details

  • SHA256: 49e3d9526539aacdc32d2c3a3f1cafe72da8f9a758fa7e950bcc4fce4b4fd158
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-2.tif ADDED

Git LFS Details

  • SHA256: 0766d02d41bd73351c15b726cdba8271adea156df2f1d14b2fb03a452fc8da8f
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-3.tif ADDED

Git LFS Details

  • SHA256: 6cb6a577f0ac65c6869e84336332cfa60c3f3a82ff7e3cee4bfbc33ce6859289
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-4.tif ADDED

Git LFS Details

  • SHA256: 3b562758710906f516a43b1ee231be2298758958c84025f45710dfec1fe21237
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-5.tif ADDED

Git LFS Details

  • SHA256: b9d5e9d7294f6baa1a70f33e89885751c7374b8f3f856bf22426c3d3db44f238
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-6.tif ADDED

Git LFS Details

  • SHA256: 1bb1cd59ce1508d49afd7e8e9b6dc3cc57cda42ac4939488833aa8ba95186b65
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-7.tif ADDED

Git LFS Details

  • SHA256: acaf7cb6def6407cb642bda5f45df2e4d398f107409d1774df61cb8a3b340edb
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-8.tif ADDED

Git LFS Details

  • SHA256: b4dd0192ef2b41fc21831755ac54ef9cc2baf5843cfba4a03ba23e2e2ae81b62
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example1.tif/seg-example1-9.tif ADDED

Git LFS Details

  • SHA256: f767a50c54757db4dd716ec57e4c1ce359aa9df05fde5df246dfaa70c0875beb
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-1.tif ADDED

Git LFS Details

  • SHA256: b0ff6ac1cc5c2a21bab8844eb941ab9659e4ef1fb1ad8264147630f47884e085
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-10.tif ADDED

Git LFS Details

  • SHA256: 51b5b3c054e21a075ff773a296ce5a94ca8b964161221e47e553846a03f8c37c
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-2.tif ADDED

Git LFS Details

  • SHA256: 82fe3f20d64113ed7fe4911ba672db243c936824991caf3c3464eac402ed3b3b
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-3.tif ADDED

Git LFS Details

  • SHA256: 1325176adf22388213b1029c4472d5aeba3aedf08684ccde3bf92e76e1035527
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-4.tif ADDED

Git LFS Details

  • SHA256: 577ccd1f4ff78e25674f1cee11727699b1bee9f06d50ff7db0ead57be0b20a2a
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-5.tif ADDED

Git LFS Details

  • SHA256: d2d0b3be43d3c333a223f8bbe23eb084961314d5c732f8a71f29679ffbdea5e9
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-6.tif ADDED

Git LFS Details

  • SHA256: 6b1532c69d987deb4cdd9e4bfe2bb62d09a35ca6e10c19801a1f00b785374a64
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-7.tif ADDED

Git LFS Details

  • SHA256: 1d4bc100bcfe2a2ef934e36165d210deff0a8f356aa51194a3dad4c582c593bc
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-8.tif ADDED

Git LFS Details

  • SHA256: 0c50c2ab2d35cf226a8b40add463febcfd9ae86cd4139161e89ddc36ca3d7d34
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/data/trailmap_volumes/seg-example2.tif/seg-example2-9.tif ADDED

Git LFS Details

  • SHA256: b39e9abeaa5fb012193d46794ab278fc0c6daf0186957100359e3d046571cc20
  • Pointer size: 133 Bytes
  • Size of remote file: 14.7 MB
data/process_batch.py ADDED
@@ -0,0 +1,43 @@
+ import os
+ import pandas as pd
+ import sys
+ from processing import process_volume, measure
+
+ if __name__ == "__main__":
+     base_path = os.path.abspath(__file__ + "/..")
+
+     expansion_factors = pd.read_csv(base_path + "/data/expansion_factors.csv")
+     expansion_factors_dict = {}
+
+     input_batch = sys.argv[1:]
+
+     for input_folder in input_batch:
+         if not os.path.isdir(input_folder):
+             raise Exception(input_folder + " is not a directory. Inputs must be a folder of files.")
+
+         row = expansion_factors[expansion_factors["id"] == os.path.basename(input_folder)[4:-4]]
+         expansion_factor = row["expansion_factor"].values[0]
+
+         if not expansion_factor:
+             raise Exception(input_folder + " does not have an expansion factor.")
+
+         expansion_factors_dict[input_folder] = expansion_factor
+
+     assert len(input_batch) == len(expansion_factors_dict), "Collisions when creating expansion_factor dictionary (i.e. one to many relationship)."
+
+     print(f"Number of volumes: {len(expansion_factors_dict)}")
+
+     save_file = base_path + "/data/segmentation_data.csv"
+
+     with open(save_file, "w") as f:
+         f.write("id,image_volume (um3),axon_volume (um3),axon_length (um),avg_axon_radius (um)\n")
+
+     for input_folder, expansion_factor in expansion_factors_dict.items():
+         name = os.path.basename(input_folder)
+         print(f"Processing {name}")
+
+         vol = process_volume(input_folder)
+         data = measure(vol, expansion_factor)
+
+         with open(save_file, "a") as f:
+             f.write(f"{name},{data[0]},{data[1]},{data[2]},{data[3]}\n")
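The script looks up each volume's expansion factor by stripping the leading "seg-" and the trailing ".tif" from the input folder's name (the [4:-4] slice), so the committed seg-example1.tif and seg-example2.tif folders map to the example1 and example2 rows of expansion_factors.csv. A minimal sketch of that lookup, with paths written relative to this repo's data/ directory purely for illustration:

import os
import pandas as pd

# Hypothetical invocation:
#   python process_batch.py data/trailmap_volumes/seg-example1.tif data/trailmap_volumes/seg-example2.tif
input_folder = "data/trailmap_volumes/seg-example1.tif"

expansion_factors = pd.read_csv("data/expansion_factors.csv")
volume_id = os.path.basename(input_folder)[4:-4]               # "seg-example1.tif" -> "example1"
row = expansion_factors[expansion_factors["id"] == volume_id]
print(row["expansion_factor"].values[0])                       # 4.172762498448865 in the committed CSV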
data/processing/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from processing.measure import measure
+ from processing.process import process_volume
data/processing/measure.py ADDED
@@ -0,0 +1,32 @@
+ import numpy as np
+ from edt import edt3d
+ from skan import Skeleton, summarize
+ from skimage.morphology import skeletonize_3d
+
+
+ """
+ Lateral and axial lightsheet resolution (microns).
+ """
+ x_res = 0.091000116097948115
+ z_res = 0.52175056847846326
+
+
+ def measure(vol, expansion_factor):
+     voxel_size = x_res * x_res * z_res
+     sampling = (z_res, x_res, x_res)
+
+     normalized_voxel_size = voxel_size / (expansion_factor ** 3)
+     normalized_sampling = tuple(dim / expansion_factor for dim in sampling)
+
+     total_img_volume = vol.size * normalized_voxel_size
+     total_axon_volume = np.count_nonzero(vol) * normalized_voxel_size
+
+     skeleton = skeletonize_3d(vol)
+     branch_data = summarize(Skeleton(skeleton, spacing=normalized_sampling))
+     total_axon_length = branch_data["branch-distance"].sum()
+
+     # significantly faster than scipy.ndimage.distance_transform_edt
+     distance_transform = edt3d(vol, anisotropy=normalized_sampling, black_border=False, order="C", parallel=10)
+     avg_axon_radius = np.mean(distance_transform[skeleton.astype(bool)])
+
+     return [total_img_volume, total_axon_volume, total_axon_length, avg_axon_radius]
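measure reports its outputs in biological (pre-expansion) units: the physical voxel volume x_res * x_res * z_res is divided by the cube of the expansion factor, and the skeleton spacing is divided linearly. A small sketch of that normalization using example1's factor (comment values are rounded; exact results come from running the code):

x_res, z_res = 0.091000116097948115, 0.52175056847846326
expansion_factor = 4.172762498448865                          # example1, from data/expansion_factors.csv

voxel_size = x_res * x_res * z_res                            # ~4.3e-3 um^3 in expanded-gel space
normalized_voxel_size = voxel_size / expansion_factor ** 3    # ~5.9e-5 um^3 per voxel, pre-expansion
normalized_sampling = tuple(d / expansion_factor for d in (z_res, x_res, x_res))
print(normalized_voxel_size, normalized_sampling)             # (z, y, x) spacing in um, pre-expansion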
data/processing/process.py ADDED
@@ -0,0 +1,31 @@
+ import cv2
+ import numpy as np
+ import os
+ from skimage.morphology import remove_small_objects
+
+
+ def read_folder_volume(path):
+     tiffs = [os.path.join(path, f) for f in os.listdir(path) if f[0] != '.']
+     fnames = sorted(tiffs)
+
+     vol = []
+
+     for i, fname in enumerate(fnames):
+         img = cv2.imread(fname, cv2.COLOR_BGR2GRAY)
+         vol.append(img)
+
+     vol = np.array(vol)
+
+     return vol
+
+
+ def binarize(array, threshold_value):
+     return (array > threshold_value)
+
+
+ def process_volume(path):
+     vol = read_folder_volume(path)
+     threshold = binarize(vol, 0.7)
+     filtered = remove_small_objects(threshold, min_size=256, connectivity=3)
+
+     return filtered
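process_volume turns a folder of 2D TIFF slices into one boolean (z, y, x) volume: the slices are stacked in sorted filename order, thresholded at 0.7, and connected components smaller than 256 voxels are discarded. A sketch of calling it directly on one of the committed example volumes, assuming it is run from this repo's data/ directory (path illustrative):

from processing import process_volume

vol = process_volume("data/trailmap_volumes/seg-example1.tif")
print(vol.shape, vol.dtype)   # (n_slices, height, width), bool
print(vol.sum())              # number of voxels kept as axon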
data/register_batch.py ADDED
@@ -0,0 +1,34 @@
+ import cv2
+ import os
+ from registration import rigid_registration
+
+
+ def get_images(path):
+     images = [os.path.join(path, f) for f in os.listdir(path) if f[0] != "."]
+     return sorted(images)
+
+
+ if __name__ == "__main__":
+     base_path = os.path.abspath(__file__ + "/..")
+
+     pre_folder = get_images(base_path + "/data/registration/pre_expansion")
+     post_folder = get_images(base_path + "/data/registration/post_expansion")
+
+     assert len(pre_folder) == len(post_folder), "Unequal number of images. Pre- and post-expansion directories must have a 1-to-1 matching of files."
+
+     save_file = base_path + "/data/expansion_factors.csv"
+
+     with open(save_file, "w") as f:
+         f.write("id,expansion_factor\n")
+
+     for i in range(len(pre_folder)):
+         name = os.path.basename(pre_folder[i])
+         name, _ = os.path.splitext(name)
+
+         pre = cv2.imread(pre_folder[i], cv2.COLOR_BGR2GRAY)
+         post = cv2.imread(post_folder[i], cv2.COLOR_BGR2GRAY)
+
+         expansion_factor = rigid_registration(pre, post)
+
+         with open(save_file, "a") as f:
+             f.write(f"{name},{expansion_factor}\n")
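register_batch.py takes no arguments: it pairs the sorted file lists from data/registration/pre_expansion and data/registration/post_expansion by index, derives each id from the pre-expansion filename, and writes one row per pair to data/expansion_factors.csv (the file committed above). A sketch of a single pair, using the committed example1 images with paths written relative to data/ for illustration:

import cv2
from registration import rigid_registration

pre = cv2.imread("data/registration/pre_expansion/example1.tif", cv2.COLOR_BGR2GRAY)
post = cv2.imread("data/registration/post_expansion/example1.tif", cv2.COLOR_BGR2GRAY)

print(rigid_registration(pre, post))   # the committed CSV records 4.172762498448865 for example1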
data/registration/__init__.py ADDED
@@ -0,0 +1 @@
+ from registration.rigid_registration import rigid_registration
data/registration/rigid_registration.py ADDED
@@ -0,0 +1,56 @@
+ import numpy as np
+ import cv2
+
+
+ def rescale_and_convert(image):
+     lower_bound, upper_bound = np.percentile(image, (0, 98))
+
+     image = np.clip(image, lower_bound, upper_bound)
+     image = (image - lower_bound) / (upper_bound - lower_bound)
+
+     return np.asarray(image * (2 ** 8 - 1), dtype=np.uint8)
+
+
+ def rigid_registration(pre, post, h_flip=False):
+     if not h_flip:
+         if pre.dtype != np.uint8:
+             pre = rescale_and_convert(pre)
+         if post.dtype != np.uint8:
+             post = rescale_and_convert(post)
+
+     sift = cv2.xfeatures2d.SIFT_create(sigma=1.6)
+     kp1, des1 = sift.detectAndCompute(post, None)
+     kp2, des2 = sift.detectAndCompute(pre, None)
+
+     bf = cv2.BFMatcher()
+     matches = bf.knnMatch(des1, des2, k=2)
+
+     good = []
+     for m, n in matches:
+         if m.distance < 0.7 * n.distance:
+             good.append(m)
+
+     MIN_MATCH_COUNT = 10
+
+     if len(good) > MIN_MATCH_COUNT:
+         print("\x1b[32mSuccess! Enough matches found: %d>%d\x1b[0m." % (len(good), MIN_MATCH_COUNT), "Horizontal flip:", h_flip)
+
+         src_pts = np.float32([kp1[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
+         dst_pts = np.float32([kp2[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
+
+         M, _ = cv2.estimateAffinePartial2D(src_pts, dst_pts, method=cv2.RANSAC)
+
+         cos_scale = M[0, 0]
+         sin_scale = M[0, 1]
+
+         expansion_factor = 1 / ((cos_scale ** 2 + sin_scale ** 2) ** 0.5)
+
+     elif not h_flip:
+         print("\x1b[31mFailure! Not enough matches are found: %d<%d\x1b[0m." % (len(good), MIN_MATCH_COUNT), "Attempting horizontal flip...")
+         return rigid_registration(pre, post[:, ::-1], h_flip=True)
+
+     else:
+         print("\x1b[31mFailure! Not enough matches are found: %d<%d\x1b[0m." % (len(good), MIN_MATCH_COUNT), "Horizontal flip:", h_flip)
+         expansion_factor = None
+
+     return expansion_factor
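cv2.estimateAffinePartial2D fits a similarity transform (rotation, uniform scale, translation) that maps the post-expansion keypoints onto the pre-expansion image, so the fitted transform shrinks the post image and the expansion factor is the reciprocal of its scale. A small sketch of that scale recovery on an assumed, purely illustrative matrix:

import numpy as np

# Matrix of the form returned by cv2.estimateAffinePartial2D:
# [[s*cos(t), -s*sin(t), tx], [s*sin(t), s*cos(t), ty]]
s, t = 0.24, np.deg2rad(3.0)        # assumed scale and rotation, for illustration only
M = np.array([[s * np.cos(t), -s * np.sin(t), 10.0],
              [s * np.sin(t),  s * np.cos(t), -4.0]])

scale = np.hypot(M[0, 0], M[0, 1])  # recovers s regardless of rotation
print(1 / scale)                    # expansion factor, ~4.17 when s = 0.24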