Zeel commited on
Commit
9af7384
·
1 Parent(s): ab52ae3
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. LICENSE +29 -0
  2. README.md +102 -3
  3. _quarto.yml +22 -0
  4. examples/air_dat.ipynb.bak +0 -0
  5. examples/all_in_one.ipynb +0 -0
  6. index.qmd +11 -0
  7. polire.egg-info/PKG-INFO +112 -0
  8. polire.egg-info/SOURCES.txt +41 -0
  9. polire.egg-info/dependency_links.txt +1 -0
  10. polire.egg-info/not-zip-safe +1 -0
  11. polire.egg-info/requires.txt +21 -0
  12. polire.egg-info/top_level.txt +2 -0
  13. polire/__init__.py +12 -0
  14. polire/__pycache__/__init__.cpython-310.pyc +0 -0
  15. polire/__pycache__/__init__.cpython-39.pyc +0 -0
  16. polire/__pycache__/_version.cpython-39.pyc +0 -0
  17. polire/__pycache__/constants.cpython-310.pyc +0 -0
  18. polire/__pycache__/constants.cpython-39.pyc +0 -0
  19. polire/base/__init__.py +1 -0
  20. polire/base/__pycache__/__init__.cpython-310.pyc +0 -0
  21. polire/base/__pycache__/base.cpython-310.pyc +0 -0
  22. polire/base/base.py +130 -0
  23. polire/constants.py +9 -0
  24. polire/custom/__init__.py +1 -0
  25. polire/custom/__pycache__/__init__.cpython-310.pyc +0 -0
  26. polire/custom/__pycache__/custom.cpython-310.pyc +0 -0
  27. polire/custom/custom.py +62 -0
  28. polire/gp/__init__.py +0 -0
  29. polire/gp/__pycache__/__init__.cpython-310.pyc +0 -0
  30. polire/gp/__pycache__/gp.cpython-310.pyc +0 -0
  31. polire/gp/gp.py +65 -0
  32. polire/gp/tests/GP interpolation.ipynb +224 -0
  33. polire/idw/__init__.py +0 -0
  34. polire/idw/__pycache__/__init__.cpython-310.pyc +0 -0
  35. polire/idw/__pycache__/idw.cpython-310.pyc +0 -0
  36. polire/idw/idw.py +91 -0
  37. polire/idw/tests/IDW Initial.ipynb +313 -0
  38. polire/idw/tests/Numpy+IDWTest.ipynb +411 -0
  39. polire/kriging/__init__.py +0 -0
  40. polire/kriging/__pycache__/__init__.cpython-310.pyc +0 -0
  41. polire/kriging/__pycache__/kriging.cpython-310.pyc +0 -0
  42. polire/kriging/kriging.py +146 -0
  43. polire/kriging/tests/Kriging Interpolation.ipynb +224 -0
  44. polire/natural_neighbors/__init__.py +0 -0
  45. polire/natural_neighbors/__pycache__/__init__.cpython-310.pyc +0 -0
  46. polire/natural_neighbors/__pycache__/natural_neighbors.cpython-310.pyc +0 -0
  47. polire/natural_neighbors/natural_neighbors.py +210 -0
  48. polire/nsgp/__init__.py +0 -0
  49. polire/nsgp/__pycache__/__init__.cpython-310.pyc +0 -0
  50. polire/nsgp/__pycache__/nsgp.cpython-310.pyc +0 -0
LICENSE ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ BSD 3-Clause License
2
+
3
+ Copyright (c) 2020, sustainability-lab
4
+ All rights reserved.
5
+
6
+ Redistribution and use in source and binary forms, with or without
7
+ modification, are permitted provided that the following conditions are met:
8
+
9
+ 1. Redistributions of source code must retain the above copyright notice, this
10
+ list of conditions and the following disclaimer.
11
+
12
+ 2. Redistributions in binary form must reproduce the above copyright notice,
13
+ this list of conditions and the following disclaimer in the documentation
14
+ and/or other materials provided with the distribution.
15
+
16
+ 3. Neither the name of the copyright holder nor the names of its
17
+ contributors may be used to endorse or promote products derived from
18
+ this software without specific prior written permission.
19
+
20
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
README.md CHANGED
@@ -1,3 +1,102 @@
1
- ---
2
- license: mit
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ![Tests](https://github.com/sustainability-lab/polire/actions/workflows/tests.yml/badge.svg)
2
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
3
+ [![Coverage](https://coveralls.io/repos/github/sustainability-lab/polire/badge.svg?branch=master)](https://coveralls.io/github/sustainability-lab/polire?branch=master)
4
+
5
+ ## Polire
6
+
7
+ ```python
8
+ pip install polire
9
+ ```
10
+
11
+
12
+ The word "interpolation" has a Latin origin and is composed of two words - Inter, meaning between, and Polire, meaning to polish.
13
+
14
+
15
+ This repository is a collection of several spatial interpolation algorithms.
16
+
17
+
18
+ ## Examples
19
+ Please refer to [the documentation](https://sustainability-lab.github.io/polire/) to check out practical examples on real datasets.
20
+
21
+ ### Minimal example of interpolation
22
+ ```python
23
+ import numpy as np
24
+ from polire import Kriging
25
+
26
+ # Data
27
+ X = np.random.rand(10, 2) # Spatial 2D points
28
+ y = np.random.rand(10) # Observations
29
+ X_new = np.random.rand(100, 2) # New spatial points
30
+
31
+ # Fit
32
+ model = Kriging()
33
+ model.fit(X, y)
34
+
35
+ # Predict
36
+ y_new = model.predict(X_new)
37
+ ```
38
+
39
+ ### Supported Interpolation Methods
40
+ ```python
41
+ from polire import (
42
+ Kriging, # Best spatial unbiased predictor
43
+ GP, # Gaussian process interpolator from GPy
44
+ IDW, # Inverse distance weighting
45
+ SpatialAverage,
46
+ Spline,
47
+ Trend,
48
+ Random, # Predict uniformly within the observation range, a reasonable baseline
49
+ NaturalNeighbor,
50
+ CustomInterpolator # Supports any regressor from Scikit-learn
51
+ )
52
+ ```
53
+
54
+ ### Use GP kernels from GPy (temporarily unavailable)
55
+ ```python
56
+ from GPy.kern import Matern32 # or any other GPy kernel
57
+
58
+ # GP model
59
+ model = GP(Matern32(input_dim=2))
60
+ ```
61
+
62
+ ### Regressors from sklearn
63
+ ```py
64
+ from sklearn.linear_model import LinearRegression # or any Scikit-learn regressor
65
+ from polire import GP, CustomInterpolator
66
+
67
+ # Sklearn model
68
+ model = CustomInterpolator(LinearRegression())
69
+ ```
70
+
71
+ ### Extract spatial features from spatio-temporal dataset
72
+ ```python
73
+ # X and X_new are datasets as numpy arrays with the first three dimensions as longitude, latitude and time.
74
+ # y is corresponding observations with X
75
+
76
+ from polire.preprocessing import SpatialFeatures
77
+ spatial = SpatialFeatures(n_closest=10)
78
+ Features = spatial.fit_transform(X, y)
79
+ Features_new = spatial.transform(X_new)
80
+ ```
81
+
82
+ ## Citation
83
+
84
+ If you use this library, please cite the following paper:
85
+
86
+ ```
87
+ @inproceedings{10.1145/3384419.3430407,
88
+ author = {Narayanan, S Deepak and Patel, Zeel B and Agnihotri, Apoorv and Batra, Nipun},
89
+ title = {A Toolkit for Spatial Interpolation and Sensor Placement},
90
+ year = {2020},
91
+ isbn = {9781450375900},
92
+ publisher = {Association for Computing Machinery},
93
+ address = {New York, NY, USA},
94
+ url = {https://doi.org/10.1145/3384419.3430407},
95
+ doi = {10.1145/3384419.3430407},
96
+ booktitle = {Proceedings of the 18th Conference on Embedded Networked Sensor Systems},
97
+ pages = {653–654},
98
+ numpages = {2},
99
+ location = {Virtual Event, Japan},
100
+ series = {SenSys '20}
101
+ }
102
+ ```
_quarto.yml ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ project:
2
+ type: website
3
+ output-dir: docs
4
+
5
+ # render only the contents mentioned in the _quarto.yml file
6
+
7
+
8
+ website:
9
+ title: "Polire"
10
+ sidebar:
11
+ style: "docked"
12
+ search: true
13
+ contents:
14
+ - section: "Introduction"
15
+ path: "index.qmd"
16
+
17
+ - section: "Examples"
18
+ contents:
19
+ - examples/all_in_one.ipynb
20
+
21
+ execute:
22
+ freeze: auto
examples/air_dat.ipynb.bak ADDED
The diff for this file is too large to render. See raw diff
 
examples/all_in_one.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
index.qmd ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## Polire
2
+
3
+ ```python
4
+ pip install polire
5
+ ```
6
+
7
+
8
+ The word "interpolation" has a Latin origin and is composed of two words - Inter, meaning between, and Polire, meaning to polish.
9
+
10
+
11
+ Polire is a collection of several spatial interpolation algorithms.
polire.egg-info/PKG-INFO ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: polire
3
+ Version: 0.1.3
4
+ Summary: A collection of interpolation methods.
5
+ Home-page: https://sustainability-lab.github.io/polire
6
+ Download-URL: https://sustainability-lab.github.io/polire
7
+ Maintainer: Zeel B Patel, Apoorv Agnihotri, S Deepak Narayanan
8
9
+ License: new BSD
10
+ Classifier: Intended Audience :: Science/Research
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: License :: OSI Approved
13
+ Classifier: Programming Language :: Python
14
+ Classifier: Topic :: Software Development
15
+ Classifier: Topic :: Scientific/Engineering
16
+ Classifier: Operating System :: Microsoft :: Windows
17
+ Classifier: Operating System :: POSIX
18
+ Classifier: Operating System :: Unix
19
+ Classifier: Operating System :: MacOS
20
+ Classifier: Programming Language :: Python :: 2.7
21
+ Classifier: Programming Language :: Python :: 3.5
22
+ Classifier: Programming Language :: Python :: 3.6
23
+ Classifier: Programming Language :: Python :: 3.7
24
+ Description-Content-Type: text/markdown
25
+ Provides-Extra: tests
26
+ Provides-Extra: docs
27
+ License-File: LICENSE
28
+
29
+ ![Tests](https://github.com/sustainability-lab/polire/actions/workflows/tests.yml/badge.svg)
30
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
31
+
32
+
33
+ ## Polire
34
+
35
+ ```python
36
+ pip install polire
37
+ ```
38
+
39
+
40
+ The word "interpolation" has a Latin origin and is composed of two words - Inter, meaning between, and Polire, meaning to polish.
41
+
42
+
43
+ This repository is a collection of several spatial interpolation algorithms.
44
+
45
+ ## Examples
46
+ ### Minimal example of interpolation
47
+ ```python
48
+ import numpy as np
49
+ from polire import Kriging
50
+
51
+ # Data
52
+ X = np.random.rand(10, 2) # Spatial 2D points
53
+ y = np.random.rand(10) # Observations
54
+ X_new = np.random.rand(100, 2) # New spatial points
55
+
56
+ # Fit
57
+ model = Kriging()
58
+ model.fit(X, y)
59
+
60
+ # Predict
61
+ y_new = model.predict(X_new)
62
+ ```
63
+
64
+ ### Supported Interpolation Methods
65
+ ```python
66
+ from polire import (
67
+ Kriging, # Best spatial unbiased predictor
68
+ GP, # Gaussian process interpolator from GPy
69
+ IDW, # Inverse distance weighting
70
+ SpatialAverage,
71
+ Spline,
72
+ Trend,
73
+ Random, # Predict uniformly within the observation range, a reasonable baseline
74
+ NaturalNeighbor,
75
+ CustomInterpolator # Supports any regressor from Scikit-learn
76
+ )
77
+ ```
78
+
79
+ ### Use GP kernels from GPy and regressors from sklearn
80
+ ```python
81
+ from sklearn.linear_model import LinearRegression # or any Scikit-learn regressor
82
+ from GPy.kern import Matern32 # or any other GPy kernel
83
+
84
+ from polire import GP, CustomInterpolator
85
+
86
+ # GP model
87
+ model = GP(Matern32(input_dim=2))
88
+
89
+ # Sklearn model
90
+ model = CustomInterpolator(LinearRegression(normalize = True))
91
+ ```
92
+
93
+ ### Extract spatial features from spatio-temporal dataset
94
+ ```python
95
+ # X and X_new are datasets as numpy arrays with the first three dimensions as longitude, latitude and time.
96
+ # y is corresponding observations with X
97
+
98
+ from polire.preprocessing import SpatialFeatures
99
+ spatial = SpatialFeatures(n_closest=10)
100
+ Features = spatial.fit_transform(X, y)
101
+ Features_new = spatial.transform(X_new)
102
+ ```
103
+
104
+ ## More info
105
+
106
+ Contributors: [S Deepak Narayanan](https://github.com/sdeepaknarayanan), [Zeel B Patel*](https://github.com/patel-zeel), [Apoorv Agnihotri](https://github.com/apoorvagnihotri), and [Nipun Batra*](https://github.com/nipunbatra) (People with * are currently active contributors).
107
+
108
+ This project is a part of Sustainability Lab at IIT Gandhinagar.
109
+
110
+ Acknowledgements to the sklearn project template, which helped in packaging this library for PyPI.
111
+
112
+
polire.egg-info/SOURCES.txt ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ LICENSE
2
+ README.md
3
+ setup.py
4
+ polire/__init__.py
5
+ polire/constants.py
6
+ polire.egg-info/PKG-INFO
7
+ polire.egg-info/SOURCES.txt
8
+ polire.egg-info/dependency_links.txt
9
+ polire.egg-info/not-zip-safe
10
+ polire.egg-info/requires.txt
11
+ polire.egg-info/top_level.txt
12
+ polire/base/__init__.py
13
+ polire/base/base.py
14
+ polire/custom/__init__.py
15
+ polire/custom/custom.py
16
+ polire/gp/__init__.py
17
+ polire/gp/gp.py
18
+ polire/idw/__init__.py
19
+ polire/idw/idw.py
20
+ polire/kriging/__init__.py
21
+ polire/kriging/kriging.py
22
+ polire/natural_neighbors/__init__.py
23
+ polire/natural_neighbors/natural_neighbors.py
24
+ polire/nsgp/__init__.py
25
+ polire/nsgp/nsgp.py
26
+ polire/preprocessing/__init__.py
27
+ polire/preprocessing/sptial_features.py
28
+ polire/random/__init__.py
29
+ polire/random/random.py
30
+ polire/spatial/__init__.py
31
+ polire/spatial/spatial.py
32
+ polire/spline/__init__.py
33
+ polire/spline/bspline.py
34
+ polire/trend/__init__.py
35
+ polire/trend/polynomials.py
36
+ polire/trend/trend.py
37
+ polire/utils/__init__.py
38
+ polire/utils/distance.py
39
+ polire/utils/gridding.py
40
+ tests/__init__.py
41
+ tests/test_basic.py
polire.egg-info/dependency_links.txt ADDED
@@ -0,0 +1 @@
 
 
1
+
polire.egg-info/not-zip-safe ADDED
@@ -0,0 +1 @@
 
 
1
+
polire.egg-info/requires.txt ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ matplotlib
2
+ numpy
3
+ pandas
4
+ pykrige
5
+ scikit_learn
6
+ scipy
7
+ seaborn
8
+ Shapely
9
+ xgboost
10
+ GPy
11
+
12
+ [docs]
13
+ sphinx
14
+ sphinx-gallery
15
+ sphinx_rtd_theme
16
+ numpydoc
17
+ matplotlib
18
+
19
+ [tests]
20
+ pytest
21
+ pytest-cov
polire.egg-info/top_level.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ polire
2
+ tests
polire/__init__.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .random.random import Random
2
+ from .idw.idw import IDW
3
+ from .spline.bspline import Spline
4
+ from .trend.trend import Trend
5
+ from .spatial.spatial import SpatialAverage
6
+ from .natural_neighbors.natural_neighbors import NaturalNeighbor
7
+ from .kriging.kriging import Kriging
8
+
9
+ # from .gp.gp import GP
10
+ from .custom.custom import CustomInterpolator
11
+
12
+ # from .nsgp.nsgp import NSGP
polire/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (532 Bytes). View file
 
polire/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (301 Bytes). View file
 
polire/__pycache__/_version.cpython-39.pyc ADDED
Binary file (190 Bytes). View file
 
polire/__pycache__/constants.cpython-310.pyc ADDED
Binary file (366 Bytes). View file
 
polire/__pycache__/constants.cpython-39.pyc ADDED
Binary file (400 Bytes). View file
 
polire/base/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ from .base import Base
polire/base/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (173 Bytes). View file
 
polire/base/__pycache__/base.cpython-310.pyc ADDED
Binary file (3.88 kB). View file
 
polire/base/base.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from ..constants import RESOLUTION
2
+
3
+
4
+ class Base:
5
+ """A class that is declared for performing Interpolation.
6
+ This class should not be called directly, use one of it's
7
+ children.
8
+ """
9
+
10
+ def __init__(self, resolution="standard", coordinate_types="Euclidean"):
11
+ self.resolution = RESOLUTION[resolution]
12
+ self.coordinate_type = coordinate_types
13
+ self._fit_called = False
14
+
15
+ def fit(self, X, y, **kwargs):
16
+ """The function call to fit the model on the given data.
17
+
18
+ Parameters
19
+ ----------
20
+
21
+ X: {array-like, 2D matrix}, shape(n_samples, 2)
22
+ The set of all coordinates, where we have ground truth
23
+ values
24
+ y: array-like, shape(n_samples,)
25
+ The set of all the ground truth values using which
26
+ we perform interpolation
27
+
28
+ Returns
29
+ -------
30
+
31
+ self : object
32
+ Returns self
33
+
34
+ """
35
+ assert len(X.shape) == 2, "X must be a 2D array got shape = " + str(
36
+ X.shape
37
+ )
38
+ # assert X.shape[1] == 2, "X can not have more than 2 dimensions"
39
+ assert len(y.shape) == 1, "y should be a 1d array"
40
+ assert y.shape[0] == X.shape[0], "X and y must be of the same size"
41
+
42
+ # saving that fit was called
43
+ self._fit_called = True
44
+
45
+ # saving boundaries
46
+ self.x1min_d = min(X[:, 0])
47
+ self.x1max_d = max(X[:, 0])
48
+ self.x2min_d = min(X[:, 1])
49
+ self.x2max_d = max(X[:, 1])
50
+ return self._fit(X, y, **kwargs) # calling child specific fit method
51
+
52
+ def predict(self, X, **kwargs):
53
+ """The function call to return interpolated data on specific
54
+ points.
55
+
56
+ Parameters
57
+ ----------
58
+
59
+ X: {array-like, 2D matrix}, shape(n_samples, 2)
60
+ The set of all coordinates, where we have ground truth
61
+ values
62
+
63
+ Returns
64
+ -------
65
+
66
+ y_pred : array-like, shape(n_samples,)
67
+ The set of interpolated values for the points used to
68
+ call the function.
69
+ """
70
+
71
+ assert len(X.shape) == 2, "X must be a 2D array got shape = " + str(
72
+ X.shape
73
+ )
74
+ # assert X.shape[1] == 2, "X can not have more than 2 dimensions"
75
+
76
+ # checking if model is fitted or not
77
+ assert self._fit_called, "First call fit method to fit the model"
78
+
79
+ # calling child specific _predict method
80
+ return self._predict(X, **kwargs)
81
+
82
+ def predict_grid(self, x1lim=None, x2lim=None, support_extrapolation=True):
83
+ """Function to interpolate data on a grid of given size.
84
+ .
85
+ Parameters
86
+ ----------
87
+ x1lim: tuple(float, float),
88
+ Upper and lower bound on 1st dimension for the interpolation.
89
+
90
+ x2lim: tuple(float, float),
91
+ Upper and lower bound on 2nd dimension for the interpolation.
92
+
93
+ Returns
94
+ -------
95
+ y: array-like, shape(n_samples,)
96
+ Interpolated values on the grid requested.
97
+ """
98
+ # checking if model is fitted or not
99
+ assert self._fit_called, "First call fit method to fit the model"
100
+
101
+ # by default we interpolate over the whole grid
102
+ if x1lim is None:
103
+ x1lim = (self.x1min_d, self.x1max_d)
104
+ if x2lim is None:
105
+ x2lim = (self.x2min_d, self.x2max_d)
106
+ (x1min, x1max) = x1lim
107
+ (x2min, x2max) = x2lim
108
+
109
+ # extrapolation isn't supported yet
110
+ if not support_extrapolation:
111
+ assert self.x1min_d >= x1min, "Extrapolation not supported"
112
+ assert self.x1max_d <= x1max, "Extrapolation not supported"
113
+ assert self.x2min_d >= x2min, "Extrapolation not supported"
114
+ assert self.x2max_d <= x2max, "Extrapolation not supported"
115
+
116
+ # calling child specific _predict_grid method
117
+ pred_y = self._predict_grid(x1lim, x2lim)
118
+ return pred_y.reshape(self.resolution, self.resolution)
119
+
120
+ def __repr__(self):
121
+ return self.__class__.__name__
122
+
123
+ def _fit(self, X, y):
124
+ raise NotImplementedError
125
+
126
+ def _predict_grid(self, x1lim, x2lim):
127
+ raise NotImplementedError
128
+
129
+ def _predict(self, X):
130
+ raise NotImplementedError
polire/constants.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ """This python script contains all the constants that
2
+ might be needed in the various interpolation pacakages.
3
+ """
4
+
5
+ low_res = 10
6
+ med_res = 100
7
+ high_res = 1000
8
+
9
+ RESOLUTION = {"low": low_res, "standard": med_res, "high": high_res}
polire/custom/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ from .custom import CustomInterpolator
polire/custom/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (191 Bytes). View file
 
polire/custom/__pycache__/custom.cpython-310.pyc ADDED
Binary file (2.45 kB). View file
 
polire/custom/custom.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from ..base import Base
4
+
5
+
6
+ class CustomInterpolator(Base):
7
+ """
8
+ Class to interpolate by fitting a sklearn type Regressor to
9
+ the given data.
10
+
11
+ Parameters
12
+ ----------
13
+ regressor: class definition,
14
+ This variable is used to pass in the Regressor we would like
15
+ to use for interpolation. The regressor sould be sklearn type
16
+ regressor. Example from sklearn.ensemble -> RandomForestRegressor
17
+
18
+ reg_kwargs: dict, optional
19
+ This is a dictionary that is passed into the Regressor initialization.
20
+ Use this to change the behaviour of the passed regressor. Default = empty dict
21
+
22
+ Attributes
23
+ ----------
24
+ reg : object
25
+ Object of the `regressor` class passed.
26
+ """
27
+
28
+ def __init__(
29
+ self, regressor, resolution="standard", coordinate_type="Euclidean"
30
+ ):
31
+ super().__init__(resolution, coordinate_type)
32
+ self.reg = regressor
33
+
34
+ def _fit(self, X, y):
35
+ """Function for fitting.
36
+ This function is not supposed to be called directly.
37
+ """
38
+ self.reg.fit(X, y)
39
+ return self
40
+
41
+ def _predict_grid(self, x1lim, x2lim):
42
+ """Function for grid interpolation.
43
+ This function is not supposed to be called directly.
44
+ """
45
+ # getting the boundaries for interpolation
46
+ x1min, x1max = x1lim
47
+ x2min, x2max = x2lim
48
+
49
+ # building the grid
50
+ x1 = np.linspace(x1min, x1max, self.resolution)
51
+ x2 = np.linspace(x2min, x2max, self.resolution)
52
+ X1, X2 = np.meshgrid(x1, x2)
53
+ return self.reg.predict(np.asarray([X1.ravel(), X2.ravel()]).T)
54
+
55
+ def _predict(self, X):
56
+ """Function for interpolation on specific points.
57
+ This function is not supposed to be called directly.
58
+ """
59
+ return self.reg.predict(X)
60
+
61
+ def __repr__(self):
62
+ return self.__class__.__name__ + "." + self.reg.__class__.__name__
polire/gp/__init__.py ADDED
File without changes
polire/gp/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (138 Bytes). View file
 
polire/gp/__pycache__/gp.cpython-310.pyc ADDED
Binary file (2.68 kB). View file
 
polire/gp/gp.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This is a module for GP Interpolation
3
+ """
4
+ import numpy as np
5
+ from ..base import Base
6
+ from GPy.models import GPRegression
7
+ from GPy.kern import RBF
8
+
9
+
10
+ class GP(Base):
11
+ """A class that is declared for performing GP interpolation.
12
+ GP interpolation (usually) works on the principle of finding the
13
+ best unbiased predictor.
14
+
15
+ Parameters
16
+ ----------
17
+ type : str, optional
18
+ This parameter defines the type of Kriging under consideration. This
19
+ implementation uses PyKrige package (https://github.com/bsmurphy/PyKrige).
20
+ The user needs to choose between "Ordinary" and "Universal".
21
+
22
+ """
23
+
24
+ def __init__(
25
+ self,
26
+ kernel=RBF(2, ARD=True),
27
+ ):
28
+ super().__init__()
29
+ self.kernel = kernel
30
+
31
+ def _fit(self, X, y, n_restarts=5, verbose=False, random_state=None):
32
+ """Fit method for GP Interpolation
33
+ This function shouldn't be called directly.
34
+ """
35
+ np.random.seed(random_state)
36
+ if len(y.shape) == 1:
37
+ y = y.reshape(-1, 1)
38
+ self.model = GPRegression(X, y, self.kernel)
39
+ self.model.optimize_restarts(n_restarts, verbose=verbose)
40
+ return self
41
+
42
+ def _predict_grid(self, x1lim, x2lim):
43
+ """The function that is called to return the interpolated data in Kriging Interpolation
44
+ in a grid. This method shouldn't be called directly"""
45
+ lims = (*x1lim, *x2lim)
46
+ x1min, x1max, x2min, x2max = lims
47
+ x1 = np.linspace(x1min, x1max, self.resolution)
48
+ x2 = np.linspace(x2min, x2max, self.resolution)
49
+
50
+ X1, X2 = np.meshgrid(x1, x2)
51
+ X = np.array([(i, j) for i, j in zip(X1.ravel(), X2.ravel())])
52
+
53
+ predictions = self.model.predict(X)[0].reshape(len(x1), len(x2))
54
+
55
+ return predictions.ravel()
56
+
57
+ def _predict(self, X, return_variance=False):
58
+ """This function should be called to return the interpolated data in kriging
59
+ in a pointwise manner. This method shouldn't be called directly."""
60
+
61
+ predictions, variance = self.model.predict(X)
62
+ if return_variance:
63
+ return predictions.ravel(), variance
64
+ else:
65
+ return predictions.ravel()
polire/gp/tests/GP interpolation.ipynb ADDED
@@ -0,0 +1,224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "from pykrige import OrdinaryKriging"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "code",
14
+ "execution_count": 4,
15
+ "metadata": {},
16
+ "outputs": [],
17
+ "source": [
18
+ "import pandas as pd\n",
19
+ "import numpy as np"
20
+ ]
21
+ },
22
+ {
23
+ "cell_type": "code",
24
+ "execution_count": 38,
25
+ "metadata": {},
26
+ "outputs": [],
27
+ "source": []
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "execution_count": 10,
32
+ "metadata": {},
33
+ "outputs": [],
34
+ "source": [
35
+ "ok = OrdinaryKriging(data[:,0],data[:,1],data[:,2])\n",
36
+ "ok.ex"
37
+ ]
38
+ },
39
+ {
40
+ "cell_type": "code",
41
+ "execution_count": 43,
42
+ "metadata": {},
43
+ "outputs": [],
44
+ "source": [
45
+ "a,b = ok.execute('grid',x[0],y[:,0])"
46
+ ]
47
+ },
48
+ {
49
+ "cell_type": "code",
50
+ "execution_count": 61,
51
+ "metadata": {},
52
+ "outputs": [],
53
+ "source": [
54
+ "from pykrige import OrdinaryKriging\n",
55
+ "import pandas as pd\n",
56
+ "import numpy as np\n",
57
+ "\n",
58
+ "def ordinary_kriging(dataset, resolution='standard', coordinate_type='euclidean',verbose='False',method='grid', isvariance = False):\n",
59
+ " if coordinate_type == 'latlong_small':\n",
60
+ " \"\"\"\n",
61
+ " Assume that the Earth is a Sphere, and use polar coordinates\n",
62
+ " $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n",
63
+ " \"\"\"\n",
64
+ " return \"To be done later\"\n",
65
+ " if coordinate_type == 'latlong_large':\n",
66
+ " \"\"\"\n",
67
+ " Code to be written after understanding all the projections.\n",
68
+ " \"\"\"\n",
69
+ " return \"To be done later\"\n",
70
+ " if coordinate_type==\"euclidean\":\n",
71
+ " \n",
72
+ " ok = OrdinaryKriging(dataset[:,0],dataset[:,1],dataset[:,2])\n",
73
+ " X = dataset[:,0]\n",
74
+ " y = dataset[:,1]\n",
75
+ " \n",
76
+ " if resolution=='high':\n",
77
+ " xx,yy = make_grid(X,y,1000)\n",
78
+ " \n",
79
+ " elif resolution=='low':\n",
80
+ " xx,yy = make_grid(X,y,10)\n",
81
+ " \n",
82
+ " elif resolution=='standard':\n",
83
+ " xx,yy = make_grid(X,y,100)\n",
84
+ " \n",
85
+ " else:\n",
86
+ " print('Value Error - Resolution can only be one of \\nhigh, low or standard')\n",
87
+ " \n",
88
+ " values, variances = ok.execute(method, xx[0], yy[:,0])\n",
89
+ " \n",
90
+ " if isvariance:\n",
91
+ " return values, variances\n",
92
+ " else:\n",
93
+ " del variances\n",
94
+ " return np.array(values)"
95
+ ]
96
+ },
97
+ {
98
+ "cell_type": "code",
99
+ "execution_count": 62,
100
+ "metadata": {},
101
+ "outputs": [
102
+ {
103
+ "data": {
104
+ "text/plain": [
105
+ "array([[129.94984945, 129.7682324 , 129.58820662, ..., 159.34079485,\n",
106
+ " 159.99175016, 160.63241067],\n",
107
+ " [130.22090025, 130.03615966, 129.8529146 , ..., 159.9575165 ,\n",
108
+ " 160.61228126, 161.25625641],\n",
109
+ " [130.50105231, 130.31324536, 130.12683652, ..., 160.59265384,\n",
110
+ " 161.25084023, 161.8977369 ],\n",
111
+ " ...,\n",
112
+ " [207.22133238, 207.82739139, 208.44615116, ..., 248.64646661,\n",
113
+ " 248.3790241 , 248.11033441],\n",
114
+ " [207.92838926, 208.53490708, 209.15376273, ..., 248.91678379,\n",
115
+ " 248.65601627, 248.39371596],\n",
116
+ " [208.61942088, 209.22595474, 209.84445913, ..., 249.17442481,\n",
117
+ " 248.9203453 , 248.66446245]])"
118
+ ]
119
+ },
120
+ "execution_count": 62,
121
+ "metadata": {},
122
+ "output_type": "execute_result"
123
+ }
124
+ ],
125
+ "source": [
126
+ "ordinary_kriging(data)"
127
+ ]
128
+ },
129
+ {
130
+ "cell_type": "markdown",
131
+ "metadata": {},
132
+ "source": [
133
+ "* What does ok('points') really do?\n",
134
+ "* Specifically test when points aren't really passed - they are let's say the point of an array\n",
135
+ "* Returns the diagonal matrix of all these coordinates"
136
+ ]
137
+ },
138
+ {
139
+ "cell_type": "code",
140
+ "execution_count": 63,
141
+ "metadata": {
142
+ "scrolled": true
143
+ },
144
+ "outputs": [
145
+ {
146
+ "data": {
147
+ "text/plain": [
148
+ "array([129.94984945, 130.03615966, 130.12683652, 130.22219703,\n",
149
+ " 130.32258826, 130.42839089, 130.54002324, 130.65794596,\n",
150
+ " 130.7826674 , 130.91474976, 131.05481629, 131.20355964,\n",
151
+ " 131.36175158, 131.53025441, 131.71003442, 131.90217771,\n",
152
+ " 132.107909 , 132.32861401, 132.56586607, 132.82145795,\n",
153
+ " 133.0974399 , 133.39616477, 133.72034153, 134.07309736,\n",
154
+ " 134.45804822, 134.87937482, 135.34189663, 135.85112772,\n",
155
+ " 136.41328222, 137.03517039, 137.72388496, 138.48612122,\n",
156
+ " 139.326921 , 140.24763047, 141.24300526, 142.29757046,\n",
157
+ " 143.37881815, 144.38425962, 144.49187978, 143.1202101 ,\n",
158
+ " 141.66667134, 140.45686022, 139.66795657, 142.48270308,\n",
159
+ " 147.03665055, 151.8487008 , 156.90272514, 162.25791164,\n",
160
+ " 168.04938768, 173.63870768, 180.93567147, 190.3440156 ,\n",
161
+ " 199.86834472, 208.48375248, 215.75635742, 222.1915652 ,\n",
162
+ " 228.08641413, 233.15249702, 236.89713686, 239.83524192,\n",
163
+ " 242.45744315, 244.57483343, 245.52139699, 245.88236757,\n",
164
+ " 246.12295211, 246.3306567 , 246.52369882, 246.70598807,\n",
165
+ " 246.87792737, 247.03919426, 247.18952217, 247.3288843 ,\n",
166
+ " 247.45749059, 247.57573348, 247.68412862, 247.78326467,\n",
167
+ " 247.87376505, 247.95626051, 248.03137024, 248.09968963,\n",
168
+ " 248.16178271, 248.21817801, 248.26936683, 248.31580309,\n",
169
+ " 248.35790422, 248.39605277, 248.43059841, 248.46186013,\n",
170
+ " 248.49012851, 248.51566797, 248.53871897, 248.55950011,\n",
171
+ " 248.57821004, 248.59502931, 248.61012204, 248.62363741,\n",
172
+ " 248.63571111, 248.64646661, 248.65601627, 248.66446245])"
173
+ ]
174
+ },
175
+ "execution_count": 63,
176
+ "metadata": {},
177
+ "output_type": "execute_result"
178
+ }
179
+ ],
180
+ "source": [
181
+ "ordinary_kriging(data,method='points')"
182
+ ]
183
+ },
184
+ {
185
+ "cell_type": "code",
186
+ "execution_count": null,
187
+ "metadata": {},
188
+ "outputs": [],
189
+ "source": [
190
+ "def make_grid(X,y,res):\n",
191
+ " y_min = y.min()-0.2\n",
192
+ " y_max = y.max()+0.2\n",
193
+ " x_min = X.min()-0.2\n",
194
+ " x_max = X.max()+0.2\n",
195
+ " x_arr = np.linspace(x_min,x_max,res)\n",
196
+ " y_arr = np.linspace(y_min,y_max,res)\n",
197
+ " xx,yy = np.meshgrid(x_arr,y_arr) \n",
198
+ " return xx,yy\n",
199
+ "x, y = make_grid(data[:,0],data[:,1],100)"
200
+ ]
201
+ }
202
+ ],
203
+ "metadata": {
204
+ "kernelspec": {
205
+ "display_name": "Python 3",
206
+ "language": "python",
207
+ "name": "python3"
208
+ },
209
+ "language_info": {
210
+ "codemirror_mode": {
211
+ "name": "ipython",
212
+ "version": 3
213
+ },
214
+ "file_extension": ".py",
215
+ "mimetype": "text/x-python",
216
+ "name": "python",
217
+ "nbconvert_exporter": "python",
218
+ "pygments_lexer": "ipython3",
219
+ "version": "3.6.8"
220
+ }
221
+ },
222
+ "nbformat": 4,
223
+ "nbformat_minor": 2
224
+ }
polire/idw/__init__.py ADDED
File without changes
polire/idw/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (139 Bytes). View file
 
polire/idw/__pycache__/idw.cpython-310.pyc ADDED
Binary file (3.31 kB). View file
 
polire/idw/idw.py ADDED
@@ -0,0 +1,91 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This is a module for inverse distance weighting (IDW) Spatial Interpolation
3
+ """
4
+ import numpy as np
5
+ from ..utils.distance import haversine, euclidean
6
+ from ..base import Base
7
+
8
+
9
class IDW(Base):
    """Inverse distance weighting (IDW) spatial interpolator.

    For more information on how this method works, kindly refer to
    https://en.wikipedia.org/wiki/Inverse_distance_weighting

    Parameters
    ----------
    exponent : positive float, optional
        The rate of fall of values from source data points.
        Higher the exponent, lower is the value when we move
        across space. Default value is 2.
    resolution : str, optional
        Grid resolution used by gridded prediction (forwarded to ``Base``).
    coordinate_type : str, optional
        "Euclidean" or "Geographic"; selects the distance metric.

    Attributes
    ----------
    X : {array-like, 2D matrix}, shape(n_samples, 2)
        Set of all the coordinates available for interpolation.

    y : array-like, shape(n_samples,)
        Set of all the available values at the specified X coordinates.

    result : array_like, shape(n_to_predict, )
        Set of all the interpolated values when interpolating over a given
        set of data points.
    """

    def __init__(
        self, exponent=2, resolution="standard", coordinate_type="Euclidean"
    ):
        super().__init__(resolution, coordinate_type)
        self.exponent = exponent
        self.interpolated_values = None
        self.X = None
        self.y = None
        self.result = None
        # Bind the distance metric once, at construction time.
        if self.coordinate_type == "Geographic":
            self.distance = haversine
        elif self.coordinate_type == "Euclidean":
            self.distance = euclidean
        else:
            raise NotImplementedError(
                "Only Geographic and Euclidean Coordinates are available"
            )

    def _fit(self, X, y):
        """Store the training coordinates and values (IDW is a lazy learner).
        This is not expected to be called directly.
        """
        self.X = X
        self.y = y
        return self

    def _predict_grid(self, x1lim, x2lim):
        """Gridded IDW interpolation over the box x1lim x x2lim.
        This function should not be called directly.
        """
        x1min, x1max = x1lim
        x2min, x2max = x2lim
        x1 = np.linspace(x1min, x1max, self.resolution)
        x2 = np.linspace(x2min, x2max, self.resolution)
        X1, X2 = np.meshgrid(x1, x2)
        return self._predict(np.array([X1.ravel(), X2.ravel()]).T)

    def _predict(self, X):
        """Pointwise IDW prediction at the query points ``X``.
        This should not be called directly.
        """
        # dist is expected to have shape (n_train, n_query); see the
        # broadcasting against self.y[:, None] below.
        dist = self.distance(self.X, X)

        # A zero distance (query point coincides with a training point)
        # would produce an infinite weight and a NaN ratio; silence the
        # warnings here and repair those entries exactly below.
        with np.errstate(divide="ignore", invalid="ignore"):
            weights = 1.0 / np.power(dist, self.exponent)
            result = (weights * self.y[:, None]).sum(axis=0) / weights.sum(axis=0)

        # Query points that exactly match a training point keep their
        # ground truth. Vectorized replacement of the former Python loop;
        # assignment (not summation) also keeps the value correct when the
        # training set contains duplicate coordinates.
        matches = np.equal(X[:, None, :], self.X[None, :, :]).all(axis=2)
        query_idx, train_idx = np.nonzero(matches)
        result[query_idx] = self.y[train_idx]

        return result
polire/idw/tests/IDW Initial.ipynb ADDED
@@ -0,0 +1,313 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "metadata": {},
6
+ "source": [
7
+ "## Inverse Distance Weighting (IDW) Interpolation"
8
+ ]
9
+ },
10
+ {
11
+ "cell_type": "markdown",
12
+ "metadata": {},
13
+ "source": [
14
+ "Let us suppose we have a data that shows the variation of one quantity of interest across space.\n",
15
+ "This could be equivalently viewed as { ($\\vec{x_1}, y_1)$,$(\\vec{x_2}, y_2)$,$(\\vec{x_3}, y_3)$, ...}, where the $\\vec{x_i}$'s represent the coordinates of the points where we have data and the $y_i$'s are the actual data at those points. <br><br>\n",
16
+ "We would like to perform an interpolation using these data points such that a few things are satisfied.\n",
17
+ "1. The interpolation is exact - the value at the known data points is the same as the estimated value, and \n",
18
+ "2. We would want far away points from a given source data point to receive less importance than nearby points.\n",
19
+ "3. Wikipedia has an excellent article on IDW. I am linking it [here](https://en.wikipedia.org/wiki/Inverse_distance_weighting)."
20
+ ]
21
+ },
22
+ {
23
+ "cell_type": "markdown",
24
+ "metadata": {},
25
+ "source": [
26
+ "We are using the following approximation for coordinate_type being latlong_small<br>\n",
27
+ "$| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$"
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 1,
33
+ "metadata": {},
34
+ "outputs": [],
35
+ "source": [
36
+ "import numpy as np\n",
37
+ "import pandas as pd\n",
38
+ "df = pd.read_csv('../../testdata/30-03-18.csv')\n",
39
+ "data = np.array(df[['longitude','latitude','value']])"
40
+ ]
41
+ },
42
+ {
43
+ "cell_type": "code",
44
+ "execution_count": 2,
45
+ "metadata": {},
46
+ "outputs": [],
47
+ "source": [
48
+ "def make_grid(X,y,res):\n",
49
+ " y_min = y.min()-0.2\n",
50
+ " y_max = y.max()+0.2\n",
51
+ " x_min = X.min()-0.2\n",
52
+ " x_max = X.max()+0.2\n",
53
+ " x_arr = np.linspace(x_min,x_max,res)\n",
54
+ " y_arr = np.linspace(y_min,y_max,res)\n",
55
+ " xx,yy = np.meshgrid(x_arr,y_arr) \n",
56
+ " return xx,yy\n",
57
+ "\n",
58
+ "def idw(dataset, exponent = 2, resolution='standard', coordinate_type='euclidean',verbose='False'):\n",
59
+ " \"\"\"\n",
60
+ " Here X is the set of spatial locations - Usually assumed to be Lat-Long\n",
61
+ " To be extended to higher dimensions. y - estimated value, exponent - how\n",
62
+ " much weight to assign to far off locations to be estimated for each data point, \n",
63
+ " extent - interpolate over a grid - what is xmax xmin ymax ymin\n",
64
+ " \"\"\"\n",
65
+ " if coordinate_type == 'latlong_small':\n",
66
+ " \"\"\"\n",
67
+ " Assume that the Earth is a Sphere, and use polar coordinates\n",
68
+ " $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n",
69
+ " \"\"\"\n",
70
+ " return \"To be done later\"\n",
71
+ " if coordinate_type == 'latlong_large':\n",
72
+ " \"\"\"\n",
73
+ " Code to be written after understanding all the projections.\n",
74
+ " \"\"\"\n",
75
+ " return \"To be done later\"\n",
76
+ " if coordinate_type==\"euclidean\":\n",
77
+ " \n",
78
+ "# print(dataset)\n",
79
+ " X = dataset[:,0]\n",
80
+ " y = dataset[:,1]\n",
81
+ " if resolution=='high':\n",
82
+ " xx,yy = make_grid(X,y,1000)\n",
83
+ " \n",
84
+ " if resolution=='low':\n",
85
+ " xx,yy = make_grid(X,y,10)\n",
86
+ " \n",
87
+ " if resolution=='standard':\n",
88
+ " xx,yy = make_grid(X,y,100)\n",
89
+ " \n",
90
+ " new = []\n",
91
+ " new_arr = dataset\n",
92
+ " for points in new_arr:\n",
93
+ " mindist = np.inf\n",
94
+ " val = 0\n",
95
+ " for j in range(len(yy)):\n",
96
+ " temp = yy[j][0]\n",
97
+ " for i in range(len(xx[0])):\n",
98
+ " dist = np.linalg.norm(np.array([xx[0][i],temp]) - points[:2])\n",
99
+ " if dist<mindist:\n",
100
+ " mindist = dist\n",
101
+ " val = (i,j)\n",
102
+ " new.append((points,val))\n",
103
+ " print(new)\n",
104
+ " new_grid = np.zeros((len(xx),len(yy)))\n",
105
+ " for i in range(len(new)):\n",
106
+ " x = new[i][1][0]\n",
107
+ " y = new[i][1][1]\n",
108
+ " new_grid[x][y] = new[i][0][2]\n",
109
+ " print(new[i])\n",
110
+ " x_nz,y_nz = np.nonzero(new_grid)\n",
111
+ " list_nz = []\n",
112
+ " for i in range(len(x_nz)):\n",
113
+ " list_nz.append((x_nz[i],y_nz[i]))\n",
114
+ " \n",
115
+ " final = np.copy(new_grid)\n",
116
+ " \n",
117
+ " for i in range(len(xx[0])):\n",
118
+ " for j in range(len(yy)):\n",
119
+ " normalise = 0\n",
120
+ " if (i,j) in list_nz:\n",
121
+ " continue\n",
122
+ " else:\n",
123
+ " \"\"\"\n",
124
+ " Could potentially have a divide by zero error here\n",
125
+ " Use a try except clause\n",
126
+ " \"\"\"\n",
127
+ " for elem in range(len(x_nz)):\n",
128
+ " source = np.array([x_nz[elem],y_nz[elem]])\n",
129
+ " target = np.array([xx[0][i],yy[j][0]])\n",
130
+ " dist = (np.abs(xx[0][source[0]] - target[0])**exponent + np.abs(yy[source[1]][0] - target[1])**exponent)**(1/exponent)\n",
131
+ " final[i][j]+=new_grid[x_nz[elem],y_nz[elem]]/dist\n",
132
+ " normalise+=1/(dist)\n",
133
+ " final[i][j]/=normalise\n",
134
+ " \n",
135
+ " return final\n"
136
+ ]
137
+ },
138
+ {
139
+ "cell_type": "code",
140
+ "execution_count": 3,
141
+ "metadata": {
142
+ "scrolled": true
143
+ },
144
+ "outputs": [
145
+ {
146
+ "name": "stdout",
147
+ "output_type": "stream",
148
+ "text": [
149
+ "[(array([ 77.234291, 28.581197, 194. ]), (60, 39)), (array([ 77.245721, 28.739434, 267. ]), (62, 60)), (array([ 77.101961, 28.822931, 273. ]), (42, 72)), (array([ 76.991463, 28.620806, 129. ]), (27, 44)), (array([ 77.0325413, 28.60909 , 176. ]), (33, 42)), (array([ 77.072196, 28.570859, 172. ]), (38, 37)), (array([ 77.1670103, 28.5646102, 168. ]), (51, 36)), (array([ 77.1180053, 28.5627763, 105. ]), (45, 36)), (array([ 77.272404, 28.530782, 203. ]), (66, 32)), (array([ 77.26075 , 28.563827, 192. ]), (64, 36)), (array([77.0996943, 28.610304 , 95. ]), (42, 43)), (array([ 77.2273074, 28.5918245, 148. ]), (59, 40)), (array([ 77.09211 , 28.732219, 203. ]), (41, 59)), (array([ 77.317084, 28.668672, 221. ]), (72, 51)), (array([ 77.1585447, 28.6573814, 141. ]), (50, 49)), (array([ 77.2011573, 28.6802747, 192. ]), (56, 52)), (array([ 77.237372, 28.612561, 203. ]), (61, 43)), (array([ 77.305651, 28.632707, 152. ]), (70, 46)), (array([ 77.1473105, 28.6514781, 185. ]), (49, 48)), (array([ 77.16482 , 28.699254, 290. ]), (51, 55)), (array([ 77.170221, 28.728722, 273. ]), (52, 59)), (array([ 77.2005604, 28.6372688, 173. ]), (56, 46)), (array([ 77.2011573, 28.7256504, 269. ]), (56, 58)), (array([ 77.136777, 28.669119, 160. ]), (47, 51)), (array([77.267246, 28.49968 , 78. ]), (65, 27)), (array([ 77.2494387, 28.6316945, 211. ]), (62, 45)), (array([ 77.2735737, 28.5512005, 252. ]), (66, 34)), (array([ 77.2159377, 28.5504249, 133. ]), (58, 34)), (array([77.1112615, 28.7500499, 77. ]), (44, 62)), (array([77.22445, 28.63576, 96. ]), (59, 46))]\n",
150
+ "(array([ 77.234291, 28.581197, 194. ]), (60, 39))\n",
151
+ "(array([ 77.245721, 28.739434, 267. ]), (62, 60))\n",
152
+ "(array([ 77.101961, 28.822931, 273. ]), (42, 72))\n",
153
+ "(array([ 76.991463, 28.620806, 129. ]), (27, 44))\n",
154
+ "(array([ 77.0325413, 28.60909 , 176. ]), (33, 42))\n",
155
+ "(array([ 77.072196, 28.570859, 172. ]), (38, 37))\n",
156
+ "(array([ 77.1670103, 28.5646102, 168. ]), (51, 36))\n",
157
+ "(array([ 77.1180053, 28.5627763, 105. ]), (45, 36))\n",
158
+ "(array([ 77.272404, 28.530782, 203. ]), (66, 32))\n",
159
+ "(array([ 77.26075 , 28.563827, 192. ]), (64, 36))\n",
160
+ "(array([77.0996943, 28.610304 , 95. ]), (42, 43))\n",
161
+ "(array([ 77.2273074, 28.5918245, 148. ]), (59, 40))\n",
162
+ "(array([ 77.09211 , 28.732219, 203. ]), (41, 59))\n",
163
+ "(array([ 77.317084, 28.668672, 221. ]), (72, 51))\n",
164
+ "(array([ 77.1585447, 28.6573814, 141. ]), (50, 49))\n",
165
+ "(array([ 77.2011573, 28.6802747, 192. ]), (56, 52))\n",
166
+ "(array([ 77.237372, 28.612561, 203. ]), (61, 43))\n",
167
+ "(array([ 77.305651, 28.632707, 152. ]), (70, 46))\n",
168
+ "(array([ 77.1473105, 28.6514781, 185. ]), (49, 48))\n",
169
+ "(array([ 77.16482 , 28.699254, 290. ]), (51, 55))\n",
170
+ "(array([ 77.170221, 28.728722, 273. ]), (52, 59))\n",
171
+ "(array([ 77.2005604, 28.6372688, 173. ]), (56, 46))\n",
172
+ "(array([ 77.2011573, 28.7256504, 269. ]), (56, 58))\n",
173
+ "(array([ 77.136777, 28.669119, 160. ]), (47, 51))\n",
174
+ "(array([77.267246, 28.49968 , 78. ]), (65, 27))\n",
175
+ "(array([ 77.2494387, 28.6316945, 211. ]), (62, 45))\n",
176
+ "(array([ 77.2735737, 28.5512005, 252. ]), (66, 34))\n",
177
+ "(array([ 77.2159377, 28.5504249, 133. ]), (58, 34))\n",
178
+ "(array([77.1112615, 28.7500499, 77. ]), (44, 62))\n",
179
+ "(array([77.22445, 28.63576, 96. ]), (59, 46))\n"
180
+ ]
181
+ },
182
+ {
183
+ "data": {
184
+ "text/plain": [
185
+ "(100, 100)"
186
+ ]
187
+ },
188
+ "execution_count": 3,
189
+ "metadata": {},
190
+ "output_type": "execute_result"
191
+ }
192
+ ],
193
+ "source": [
194
+ "idw(data).shape\n"
195
+ ]
196
+ },
197
+ {
198
+ "cell_type": "code",
199
+ "execution_count": 21,
200
+ "metadata": {},
201
+ "outputs": [],
202
+ "source": [
203
+ "temp = data[10]"
204
+ ]
205
+ },
206
+ {
207
+ "cell_type": "code",
208
+ "execution_count": 36,
209
+ "metadata": {},
210
+ "outputs": [
211
+ {
212
+ "data": {
213
+ "text/plain": [
214
+ "(array([10, 10, 10]), array([0, 1, 2]))"
215
+ ]
216
+ },
217
+ "execution_count": 36,
218
+ "metadata": {},
219
+ "output_type": "execute_result"
220
+ }
221
+ ],
222
+ "source": [
223
+ "np.where(data==temp)"
224
+ ]
225
+ },
226
+ {
227
+ "cell_type": "code",
228
+ "execution_count": 32,
229
+ "metadata": {},
230
+ "outputs": [],
231
+ "source": [
232
+ "result = np.nonzero(data==temp)"
233
+ ]
234
+ },
235
+ {
236
+ "cell_type": "code",
237
+ "execution_count": 37,
238
+ "metadata": {},
239
+ "outputs": [
240
+ {
241
+ "data": {
242
+ "text/plain": [
243
+ "10"
244
+ ]
245
+ },
246
+ "execution_count": 37,
247
+ "metadata": {},
248
+ "output_type": "execute_result"
249
+ }
250
+ ],
251
+ "source": [
252
+ "np.unique(result[0])[0]"
253
+ ]
254
+ },
255
+ {
256
+ "cell_type": "code",
257
+ "execution_count": 29,
258
+ "metadata": {},
259
+ "outputs": [],
260
+ "source": [
261
+ "listOfCoordinates= list(zip(result[0], result[1]))"
262
+ ]
263
+ },
264
+ {
265
+ "cell_type": "code",
266
+ "execution_count": 30,
267
+ "metadata": {},
268
+ "outputs": [
269
+ {
270
+ "data": {
271
+ "text/plain": [
272
+ "[(10, 0), (10, 1), (10, 2)]"
273
+ ]
274
+ },
275
+ "execution_count": 30,
276
+ "metadata": {},
277
+ "output_type": "execute_result"
278
+ }
279
+ ],
280
+ "source": [
281
+ "listOfCoordinates"
282
+ ]
283
+ },
284
+ {
285
+ "cell_type": "code",
286
+ "execution_count": null,
287
+ "metadata": {},
288
+ "outputs": [],
289
+ "source": []
290
+ }
291
+ ],
292
+ "metadata": {
293
+ "kernelspec": {
294
+ "display_name": "Python 3",
295
+ "language": "python",
296
+ "name": "python3"
297
+ },
298
+ "language_info": {
299
+ "codemirror_mode": {
300
+ "name": "ipython",
301
+ "version": 3
302
+ },
303
+ "file_extension": ".py",
304
+ "mimetype": "text/x-python",
305
+ "name": "python",
306
+ "nbconvert_exporter": "python",
307
+ "pygments_lexer": "ipython3",
308
+ "version": "3.6.8"
309
+ }
310
+ },
311
+ "nbformat": 4,
312
+ "nbformat_minor": 2
313
+ }
polire/idw/tests/Numpy+IDWTest.ipynb ADDED
@@ -0,0 +1,411 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import numpy as np"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "code",
14
+ "execution_count": 2,
15
+ "metadata": {},
16
+ "outputs": [],
17
+ "source": [
18
+ "a = np.array([[1,2,3],[4,5,6]])"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 3,
24
+ "metadata": {},
25
+ "outputs": [
26
+ {
27
+ "data": {
28
+ "text/plain": [
29
+ "array([[1, 2, 3],\n",
30
+ " [4, 5, 6]])"
31
+ ]
32
+ },
33
+ "execution_count": 3,
34
+ "metadata": {},
35
+ "output_type": "execute_result"
36
+ }
37
+ ],
38
+ "source": [
39
+ "a"
40
+ ]
41
+ },
42
+ {
43
+ "cell_type": "code",
44
+ "execution_count": 9,
45
+ "metadata": {},
46
+ "outputs": [],
47
+ "source": [
48
+ "b = np.array([[2,3,4],[5,6,9]])"
49
+ ]
50
+ },
51
+ {
52
+ "cell_type": "code",
53
+ "execution_count": 10,
54
+ "metadata": {},
55
+ "outputs": [
56
+ {
57
+ "data": {
58
+ "text/plain": [
59
+ "array([[2, 3, 4],\n",
60
+ " [5, 6, 9]])"
61
+ ]
62
+ },
63
+ "execution_count": 10,
64
+ "metadata": {},
65
+ "output_type": "execute_result"
66
+ }
67
+ ],
68
+ "source": [
69
+ "b"
70
+ ]
71
+ },
72
+ {
73
+ "cell_type": "code",
74
+ "execution_count": 11,
75
+ "metadata": {},
76
+ "outputs": [
77
+ {
78
+ "data": {
79
+ "text/plain": [
80
+ "array([[1, 2, 3],\n",
81
+ " [4, 5, 6]])"
82
+ ]
83
+ },
84
+ "execution_count": 11,
85
+ "metadata": {},
86
+ "output_type": "execute_result"
87
+ }
88
+ ],
89
+ "source": [
90
+ "a"
91
+ ]
92
+ },
93
+ {
94
+ "cell_type": "code",
95
+ "execution_count": 12,
96
+ "metadata": {},
97
+ "outputs": [
98
+ {
99
+ "data": {
100
+ "text/plain": [
101
+ "array([[-1, -1, -1],\n",
102
+ " [-1, -1, -3]])"
103
+ ]
104
+ },
105
+ "execution_count": 12,
106
+ "metadata": {},
107
+ "output_type": "execute_result"
108
+ }
109
+ ],
110
+ "source": [
111
+ "a - b"
112
+ ]
113
+ },
114
+ {
115
+ "cell_type": "code",
116
+ "execution_count": 13,
117
+ "metadata": {},
118
+ "outputs": [
119
+ {
120
+ "data": {
121
+ "text/plain": [
122
+ "1.7320508075688772"
123
+ ]
124
+ },
125
+ "execution_count": 13,
126
+ "metadata": {},
127
+ "output_type": "execute_result"
128
+ }
129
+ ],
130
+ "source": [
131
+ "np.argmin([np.linalg.norm(a[i] - b[i]) for i in range(len(a))])"
132
+ ]
133
+ },
134
+ {
135
+ "cell_type": "code",
136
+ "execution_count": 14,
137
+ "metadata": {},
138
+ "outputs": [],
139
+ "source": [
140
+ "np.min?"
141
+ ]
142
+ },
143
+ {
144
+ "cell_type": "code",
145
+ "execution_count": 2,
146
+ "metadata": {},
147
+ "outputs": [],
148
+ "source": [
149
+ "\n",
150
+ "\"\"\"\n",
151
+ "This is a module for IDW Spatial Interpolation\n",
152
+ "\"\"\"\n",
153
+ "import numpy as np\n",
154
+ "import pandas as pd\n",
155
+ "from copy import deepcopy\n",
156
+ "class idw():\n",
157
+ " \"\"\" A class that is declared for performing IDW Interpolation.\n",
158
+ " For more information on how this method works, kindly refer to\n",
159
+ " https://en.wikipedia.org/wiki/Inverse_distance_weighting\n",
160
+ "\n",
161
+ " Parameters\n",
162
+ " ----------\n",
163
+ " exponent : positive float, optional\n",
164
+ " The rate of fall of values from source data points.\n",
165
+ " Higher the exponent, lower is the value when we move\n",
166
+ " across space. Default value is 2.\n",
167
+ " resolution: str, optional\n",
168
+ " Decides the smoothness of the interpolation. Note that\n",
169
+ " interpolation is done over a grid. Higher the resolution\n",
170
+ " means more grid cells and more time for interpolation.\n",
171
+ " Default value is 'standard'\n",
172
+ " coordinate_type: str, optional\n",
173
+ " Decides the distance metric to be used, while performing\n",
174
+ " interpolation. Euclidean by default. \n",
175
+ " \"\"\"\n",
176
+ " def __init__(self, exponent = 2, resolution = 'standard', coordinate_type='Euclidean'):\n",
177
+ " \n",
178
+ " self.exponent = exponent\n",
179
+ " self.resolution = resolution\n",
180
+ " self.coordinate_type = coordinate_type\n",
181
+ " self.interpolated_values = None\n",
182
+ " self.x_grid = None\n",
183
+ " self.y_grid = None\n",
184
+ "\n",
185
+ " def make_grid(self, x, y, res, offset=0.2):\n",
186
+ "\n",
187
+ " \"\"\" This function returns the grid to perform interpolation on.\n",
188
+ " This function is used inside the fit() attribute of the idw class.\n",
189
+ " \n",
190
+ " Parameters\n",
191
+ " ----------\n",
192
+ " x: array-like, shape(n_samples,)\n",
193
+ " The first coordinate values of all points where\n",
194
+ " ground truth is available\n",
195
+ " y: array-like, shape(n_samples,)\n",
196
+ " The second coordinate values of all points where\n",
197
+ " ground truth is available\n",
198
+ " res: int\n",
199
+ " The resolution value\n",
200
+ " offset: float, optional\n",
201
+ " A value between 0 and 0.5 that specifies the extra interpolation to be done\n",
202
+ " Default is 0.2\n",
203
+ " \n",
204
+ " Returns\n",
205
+ " -------\n",
206
+ " xx : {array-like, 2D}, shape (n_samples, n_samples)\n",
207
+ " yy : {array-like, 2D}, shape (n_samples, n_samples)\n",
208
+ " \"\"\"\n",
209
+ " y_min = y.min() - offset\n",
210
+ " y_max = y.max()+ offset\n",
211
+ " x_min = x.min()-offset\n",
212
+ " x_max = x.max()+offset\n",
213
+ " x_arr = np.linspace(x_min,x_max,res)\n",
214
+ " y_arr = np.linspace(y_min,y_max,res)\n",
215
+ " xx,yy = np.meshgrid(x_arr,y_arr) \n",
216
+ " return xx,yy\n",
217
+ "\n",
218
+ " \n",
219
+ " def fit(self, X, y):\n",
220
+ " \"\"\" The function call to fit the model on the given data. \n",
221
+ " Parameters\n",
222
+ " ----------\n",
223
+ " X: {array-like, 2D matrix}, shape(n_samples, 2)\n",
224
+ " The set of all coordinates, where we have ground truth\n",
225
+ " values\n",
226
+ " y: array-like, shape(n_samples,)\n",
227
+ " The set of all the ground truth values using which\n",
228
+ " we perform interpolation\n",
229
+ "\n",
230
+ " Returns\n",
231
+ " -------\n",
232
+ " self : object\n",
233
+ " Returns self\n",
234
+ " \"\"\"\n",
235
+ "\n",
236
+ "# if self.coordinate_type == 'latlong_small':\n",
237
+ "# \t \t\"\"\"\n",
238
+ "# \t \t\tUse the conversions and projections for small changes in LatLong\n",
239
+ "# \t\t\"\"\"\n",
240
+ "# \t \t print (\"To be done later\")\n",
241
+ "# return self\n",
242
+ "\n",
243
+ "# if self.coordinate_type == 'latlong_large':\n",
244
+ "# \"\"\"\n",
245
+ "# Code to be written after understanding all the projections.\n",
246
+ "# \"\"\"\n",
247
+ "# print (\"To be done later\")\n",
248
+ "# return self\n",
249
+ "\n",
250
+ " if self.coordinate_type==\"Euclidean\":\n",
251
+ " \n",
252
+ " X = deepcopy(np.c_[X,y])\n",
253
+ "\n",
254
+ " if self.resolution=='high':\n",
255
+ " xx,yy = self.make_grid(X,y,1000)\n",
256
+ " \n",
257
+ " if self.resolution=='low':\n",
258
+ " xx,yy = self.make_grid(X,y,10)\n",
259
+ " \n",
260
+ " if self.resolution=='standard':\n",
261
+ " xx,yy = self.make_grid(X,y,100)\n",
262
+ "\n",
263
+ " new = []\n",
264
+ " new_arr = deepcopy(X)\n",
265
+ " for points in new_arr:\n",
266
+ " min_dist = np.inf\n",
267
+ " val = 0\n",
268
+ " for j in range(len(yy)):\n",
269
+ " temp = yy[j][0]\n",
270
+ " for i in range(len(xx[0])):\n",
271
+ " dist = np.linalg.norm(np.array([xx[0][i],temp]) - points[:2])\n",
272
+ " if dist<min_dist:\n",
273
+ " min_dist = dist\n",
274
+ " val = (i,j)\n",
275
+ " new.append((points,val))\n",
276
+ " new_grid = np.zeros((len(xx),len(yy)))\n",
277
+ " for i in range(len(new)):\n",
278
+ " x = new[i][1][0]\n",
279
+ " y = new[i][1][1]\n",
280
+ " new_grid[x][y] = new[i][0][2]\n",
281
+ " x_nz,y_nz = np.nonzero(new_grid)\n",
282
+ " list_nz = []\n",
283
+ " for i in range(len(x_nz)):\n",
284
+ " list_nz.append((x_nz[i],y_nz[i]))\n",
285
+ " final = np.copy(new_grid)\n",
286
+ " for i in range(len(xx[0])):\n",
287
+ " for j in range(len(yy)):\n",
288
+ " normalise = 0\n",
289
+ " if (i,j) in list_nz:\n",
290
+ " continue\n",
291
+ " else:\n",
292
+ " for elem in range(len(x_nz)):\n",
293
+ " source = np.array([x_nz[elem],y_nz[elem]])\n",
294
+ " target = np.array([xx[0][i],yy[j][0]])\n",
295
+ " dist = (np.abs(xx[0][source[0]] - target[0])**self.exponent + np.abs(yy[source[1]][0] - target[1])**self.exponent)**(1/self.exponent)\n",
296
+ " final[i][j]+=new_grid[x_nz[elem],y_nz[elem]]/dist\n",
297
+ " normalise+=1/(dist)\n",
298
+ " final[i][j]/=normalise\n",
299
+ " self.interpolated_values = final\n",
300
+ " self.x_grid = xx\n",
301
+ " self.y_grid = yy\n",
302
+ " \n",
303
+ " return self\n",
304
+ "\n",
305
+ "# def predict(self, X):\n",
306
+ "# \"\"\" The function call to predict using the interpolated data\n",
307
+ "# Parameters\n",
308
+ "# ----------\n",
309
+ "# X: {array-like, 2D matrix}, shape(n_samples, 2)\n",
310
+ "# The set of all coordinates, where we have ground truth\n",
311
+ "# values\n",
312
+ " \n",
313
+ "\n",
314
+ "# Returns\n",
315
+ "# -------\n",
316
+ "# y: array-like, shape(n_samples,)\n",
317
+ "# The set of all the ground truth values using which\n",
318
+ "# we perform interpolation \n",
319
+ "# \"\"\"\n",
320
+ "# if self.coordinate_type == 'Euclidean':\n",
321
+ "# for i in range(self.x_grid[0]):\n",
322
+ "# for j in range()\n",
323
+ " \n",
324
+ "# else:\n",
325
+ "# print(\"Will be done later\")\n",
326
+ "# return \n",
327
+ " \n",
328
+ " \n",
329
+ "# self.x_grid\n",
330
+ "\n"
331
+ ]
332
+ },
333
+ {
334
+ "cell_type": "code",
335
+ "execution_count": 6,
336
+ "metadata": {},
337
+ "outputs": [
338
+ {
339
+ "data": {
340
+ "text/plain": [
341
+ "<__main__.idw at 0x7f36db6f9c88>"
342
+ ]
343
+ },
344
+ "execution_count": 6,
345
+ "metadata": {},
346
+ "output_type": "execute_result"
347
+ }
348
+ ],
349
+ "source": [
350
+ "a = idw()\n",
351
+ "import pandas as pd\n",
352
+ "df = pd.read_csv('../../testdata/30-03-18.csv')\n",
353
+ "data = np.array(df[['longitude','latitude','value']])\n",
354
+ "a.fit(data[:,:2],data[:,2])"
355
+ ]
356
+ },
357
+ {
358
+ "cell_type": "code",
359
+ "execution_count": 5,
360
+ "metadata": {},
361
+ "outputs": [
362
+ {
363
+ "data": {
364
+ "text/plain": [
365
+ "array([[171.89189189, 171.89597641, 171.90813547, ..., 173.89050472,\n",
366
+ " 173.89261459, 173.89466512],\n",
367
+ " [171.77142857, 171.77625338, 171.79060316, ..., 173.89585441,\n",
368
+ " 173.89787202, 173.89983245],\n",
369
+ " [171.63636364, 171.64211895, 171.65921778, ..., 173.9012935 ,\n",
370
+ " 173.90321551, 173.90508269],\n",
371
+ " ...,\n",
372
+ " [174.49681529, 174.49676176, 174.49660126, ..., 174.24671184,\n",
373
+ " 174.24416446, 174.24164382],\n",
374
+ " [174.49056604, 174.49051451, 174.49035999, ..., 174.24671343,\n",
375
+ " 174.24419773, 174.2417078 ],\n",
376
+ " [174.48447205, 174.48442242, 174.48427358, ..., 174.2466762 ,\n",
377
+ " 174.24419219, 174.24173298]])"
378
+ ]
379
+ },
380
+ "execution_count": 5,
381
+ "metadata": {},
382
+ "output_type": "execute_result"
383
+ }
384
+ ],
385
+ "source": [
386
+ "a.interpolated_values"
387
+ ]
388
+ }
389
+ ],
390
+ "metadata": {
391
+ "kernelspec": {
392
+ "display_name": "Python 3",
393
+ "language": "python",
394
+ "name": "python3"
395
+ },
396
+ "language_info": {
397
+ "codemirror_mode": {
398
+ "name": "ipython",
399
+ "version": 3
400
+ },
401
+ "file_extension": ".py",
402
+ "mimetype": "text/x-python",
403
+ "name": "python",
404
+ "nbconvert_exporter": "python",
405
+ "pygments_lexer": "ipython3",
406
+ "version": "3.6.8"
407
+ }
408
+ },
409
+ "nbformat": 4,
410
+ "nbformat_minor": 2
411
+ }
polire/kriging/__init__.py ADDED
File without changes
polire/kriging/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (143 Bytes). View file
 
polire/kriging/__pycache__/kriging.cpython-310.pyc ADDED
Binary file (4.67 kB). View file
 
polire/kriging/kriging.py ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This is a module for Kriging Interpolation
3
+ """
4
+ import numpy as np
5
+ from ..base import Base
6
+ from pykrige.ok import OrdinaryKriging
7
+ from pykrige.uk import UniversalKriging
8
+
9
+
10
+ class Kriging(Base):
11
+ """A class that is declared for performing Kriging interpolation.
12
+ Kriging interpolation (usually) works on the principle of finding the
13
+ best unbiased predictor. Ordinary Kriging, for an example, involves finding out the
14
+ best unbaised linear predictor.
15
+
16
+ Parameters
17
+ ----------
18
+ type : str, optional
19
+ This parameter defines the type of Kriging under consideration. This
20
+ implementation uses PyKrige package (https://github.com/bsmurphy/PyKrige).
21
+ The user needs to choose between "Ordinary" and "Universal".
22
+
23
+ plotting: boolean, optional
24
+ This parameter plots the fit semivariogram. We use PyKrige's inbuilt plotter for the same.s
25
+
26
+ variogram_model : str, optional
27
+ Specifies which variogram model to use; may be one of the following:
28
+ linear, power, gaussian, spherical, exponential, hole-effect.
29
+ Default is linear variogram model. To utilize a custom variogram model,
30
+ specify 'custom'; you must also provide variogram_parameters and
31
+ variogram_function. Note that the hole-effect model is only technically
32
+ correct for one-dimensional problems.
33
+
34
+ require_variance : Boolean, optional
35
+ This variable returns the uncertainity in the interpolated values using Kriging
36
+ interpolation. If this is True, kindly call the attribute return_variance, of this class
37
+ to retreive the computed variances. False is the default value.d
38
+
39
+ nlags: int, optional
40
+ Number of lags to be considered for semivariogram. As in PyKrige, we set default to be 6.
41
+ """
42
+
43
+ def __init__(
44
+ self,
45
+ type="Ordinary",
46
+ plotting=False,
47
+ variogram_model="linear",
48
+ require_variance=False,
49
+ resolution="standard",
50
+ coordinate_type="Eucledian",
51
+ nlags=6,
52
+ ):
53
+ super().__init__(resolution, coordinate_type)
54
+ self.variogram_model = variogram_model
55
+ self.ok = None
56
+ self.uk = None
57
+ self.type = type
58
+ self.plotting = plotting
59
+ self.coordinate_type = None
60
+ self.require_variance = require_variance
61
+ self.variance = None
62
+
63
+ if coordinate_type == "Eucledian":
64
+ self.coordinate_type = "euclidean"
65
+ else:
66
+ self.coordinate_type = "geographic"
67
+
68
+ self.nlags = nlags
69
+
70
+ def _fit(self, X, y):
71
+ """This method of the Kriging Class is used to fit Kriging interpolation model to
72
+ the train data. This function shouldn't be called directly."""
73
+ if self.type == "Ordinary":
74
+ self.ok = OrdinaryKriging(
75
+ X[:, 0],
76
+ X[:, 1],
77
+ y,
78
+ variogram_model=self.variogram_model,
79
+ enable_plotting=self.plotting,
80
+ coordinates_type=self.coordinate_type,
81
+ nlags=self.nlags,
82
+ )
83
+
84
+ elif self.type == "Universal":
85
+ self.uk = UniversalKriging(
86
+ X[:, 0],
87
+ X[:, 1],
88
+ y,
89
+ variogram_model=self.variogram_model,
90
+ enable_plotting=self.plotting,
91
+ )
92
+
93
+ else:
94
+ raise ValueError(
95
+ "Choose either Universal or Ordinary - Given argument is neither"
96
+ )
97
+
98
+ return self
99
+
100
+ def _predict_grid(self, x1lim, x2lim):
101
+ """The function that is called to return the interpolated data in Kriging Interpolation
102
+ in a grid. This method shouldn't be called directly"""
103
+ lims = (*x1lim, *x2lim)
104
+ x1min, x1max, x2min, x2max = lims
105
+ x1 = np.linspace(x1min, x1max, self.resolution)
106
+ x2 = np.linspace(x2min, x2max, self.resolution)
107
+
108
+ if self.ok is not None:
109
+ predictions, self.variance = self.ok.execute(
110
+ style="grid", xpoints=x1, ypoints=x2
111
+ )
112
+
113
+ else:
114
+ predictions, self.variance = self.uk.execute(
115
+ style="grid", xpoints=x1, ypoints=x2
116
+ )
117
+
118
+ return predictions
119
+
120
+ def _predict(self, X):
121
+ """This function should be called to return the interpolated data in kriging
122
+ in a pointwise manner. This method shouldn't be called directly."""
123
+ if self.ok is not None:
124
+ predictions, self.variance = self.ok.execute(
125
+ style="points", xpoints=X[:, 0], ypoints=X[:, 1]
126
+ )
127
+
128
+ else:
129
+ predictions, self.variance = self.uk.execute(
130
+ style="points", xpoints=X[:, 0], ypoints=X[:, 1]
131
+ )
132
+
133
+ return predictions
134
+
135
+ def return_variance(self):
136
+ """This method of the Kriging class returns the variance at the interpolated
137
+ points if the user chooses to use this option at the beginning of the interpolation
138
+ """
139
+ if self.require_variance:
140
+ return self.variance
141
+
142
+ else:
143
+ print(
144
+ "Variance not asked for, while instantiating the object. Returning None"
145
+ )
146
+ return None
polire/kriging/tests/Kriging Interpolation.ipynb ADDED
@@ -0,0 +1,224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "from pykrige import OrdinaryKriging"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "code",
14
+ "execution_count": 4,
15
+ "metadata": {},
16
+ "outputs": [],
17
+ "source": [
18
+ "import pandas as pd\n",
19
+ "import numpy as np"
20
+ ]
21
+ },
22
+ {
23
+ "cell_type": "code",
24
+ "execution_count": 38,
25
+ "metadata": {},
26
+ "outputs": [],
27
+ "source": []
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "execution_count": 10,
32
+ "metadata": {},
33
+ "outputs": [],
34
+ "source": [
35
+ "ok = OrdinaryKriging(data[:,0],data[:,1],data[:,2])\n",
36
+ "ok.ex"
37
+ ]
38
+ },
39
+ {
40
+ "cell_type": "code",
41
+ "execution_count": 43,
42
+ "metadata": {},
43
+ "outputs": [],
44
+ "source": [
45
+ "a,b = ok.execute('grid',x[0],y[:,0])"
46
+ ]
47
+ },
48
+ {
49
+ "cell_type": "code",
50
+ "execution_count": 61,
51
+ "metadata": {},
52
+ "outputs": [],
53
+ "source": [
54
+ "from pykrige import OrdinaryKriging\n",
55
+ "import pandas as pd\n",
56
+ "import numpy as np\n",
57
+ "\n",
58
+ "def ordinary_kriging(dataset, resolution='standard', coordinate_type='euclidean',verbose='False',method='grid', isvariance = False):\n",
59
+ " if coordinate_type == 'latlong_small':\n",
60
+ " \"\"\"\n",
61
+ " Assume that the Earth is a Sphere, and use polar coordinates\n",
62
+ " $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n",
63
+ " \"\"\"\n",
64
+ " return \"To be done later\"\n",
65
+ " if coordinate_type == 'latlong_large':\n",
66
+ " \"\"\"\n",
67
+ " Code to be written after understanding all the projections.\n",
68
+ " \"\"\"\n",
69
+ " return \"To be done later\"\n",
70
+ " if coordinate_type==\"euclidean\":\n",
71
+ " \n",
72
+ " ok = OrdinaryKriging(dataset[:,0],dataset[:,1],dataset[:,2])\n",
73
+ " X = dataset[:,0]\n",
74
+ " y = dataset[:,1]\n",
75
+ " \n",
76
+ " if resolution=='high':\n",
77
+ " xx,yy = make_grid(X,y,1000)\n",
78
+ " \n",
79
+ " elif resolution=='low':\n",
80
+ " xx,yy = make_grid(X,y,10)\n",
81
+ " \n",
82
+ " elif resolution=='standard':\n",
83
+ " xx,yy = make_grid(X,y,100)\n",
84
+ " \n",
85
+ " else:\n",
86
+ " print('Value Error - Resolution can only be one of \\nhigh, low or standard')\n",
87
+ " \n",
88
+ " values, variances = ok.execute(method, xx[0], yy[:,0])\n",
89
+ " \n",
90
+ " if isvariance:\n",
91
+ " return values, variances\n",
92
+ " else:\n",
93
+ " del variances\n",
94
+ " return np.array(values)"
95
+ ]
96
+ },
97
+ {
98
+ "cell_type": "code",
99
+ "execution_count": 62,
100
+ "metadata": {},
101
+ "outputs": [
102
+ {
103
+ "data": {
104
+ "text/plain": [
105
+ "array([[129.94984945, 129.7682324 , 129.58820662, ..., 159.34079485,\n",
106
+ " 159.99175016, 160.63241067],\n",
107
+ " [130.22090025, 130.03615966, 129.8529146 , ..., 159.9575165 ,\n",
108
+ " 160.61228126, 161.25625641],\n",
109
+ " [130.50105231, 130.31324536, 130.12683652, ..., 160.59265384,\n",
110
+ " 161.25084023, 161.8977369 ],\n",
111
+ " ...,\n",
112
+ " [207.22133238, 207.82739139, 208.44615116, ..., 248.64646661,\n",
113
+ " 248.3790241 , 248.11033441],\n",
114
+ " [207.92838926, 208.53490708, 209.15376273, ..., 248.91678379,\n",
115
+ " 248.65601627, 248.39371596],\n",
116
+ " [208.61942088, 209.22595474, 209.84445913, ..., 249.17442481,\n",
117
+ " 248.9203453 , 248.66446245]])"
118
+ ]
119
+ },
120
+ "execution_count": 62,
121
+ "metadata": {},
122
+ "output_type": "execute_result"
123
+ }
124
+ ],
125
+ "source": [
126
+ "ordinary_kriging(data)"
127
+ ]
128
+ },
129
+ {
130
+ "cell_type": "markdown",
131
+ "metadata": {},
132
+ "source": [
133
+ "* What does ok('points') really do?\n",
134
+ "* Specifically test when points aren't really passed - they are let's say the point of an array\n",
135
+ "* Returns the diagonal matrix of all these coordinates"
136
+ ]
137
+ },
138
+ {
139
+ "cell_type": "code",
140
+ "execution_count": 63,
141
+ "metadata": {
142
+ "scrolled": true
143
+ },
144
+ "outputs": [
145
+ {
146
+ "data": {
147
+ "text/plain": [
148
+ "array([129.94984945, 130.03615966, 130.12683652, 130.22219703,\n",
149
+ " 130.32258826, 130.42839089, 130.54002324, 130.65794596,\n",
150
+ " 130.7826674 , 130.91474976, 131.05481629, 131.20355964,\n",
151
+ " 131.36175158, 131.53025441, 131.71003442, 131.90217771,\n",
152
+ " 132.107909 , 132.32861401, 132.56586607, 132.82145795,\n",
153
+ " 133.0974399 , 133.39616477, 133.72034153, 134.07309736,\n",
154
+ " 134.45804822, 134.87937482, 135.34189663, 135.85112772,\n",
155
+ " 136.41328222, 137.03517039, 137.72388496, 138.48612122,\n",
156
+ " 139.326921 , 140.24763047, 141.24300526, 142.29757046,\n",
157
+ " 143.37881815, 144.38425962, 144.49187978, 143.1202101 ,\n",
158
+ " 141.66667134, 140.45686022, 139.66795657, 142.48270308,\n",
159
+ " 147.03665055, 151.8487008 , 156.90272514, 162.25791164,\n",
160
+ " 168.04938768, 173.63870768, 180.93567147, 190.3440156 ,\n",
161
+ " 199.86834472, 208.48375248, 215.75635742, 222.1915652 ,\n",
162
+ " 228.08641413, 233.15249702, 236.89713686, 239.83524192,\n",
163
+ " 242.45744315, 244.57483343, 245.52139699, 245.88236757,\n",
164
+ " 246.12295211, 246.3306567 , 246.52369882, 246.70598807,\n",
165
+ " 246.87792737, 247.03919426, 247.18952217, 247.3288843 ,\n",
166
+ " 247.45749059, 247.57573348, 247.68412862, 247.78326467,\n",
167
+ " 247.87376505, 247.95626051, 248.03137024, 248.09968963,\n",
168
+ " 248.16178271, 248.21817801, 248.26936683, 248.31580309,\n",
169
+ " 248.35790422, 248.39605277, 248.43059841, 248.46186013,\n",
170
+ " 248.49012851, 248.51566797, 248.53871897, 248.55950011,\n",
171
+ " 248.57821004, 248.59502931, 248.61012204, 248.62363741,\n",
172
+ " 248.63571111, 248.64646661, 248.65601627, 248.66446245])"
173
+ ]
174
+ },
175
+ "execution_count": 63,
176
+ "metadata": {},
177
+ "output_type": "execute_result"
178
+ }
179
+ ],
180
+ "source": [
181
+ "ordinary_kriging(data,method='points')"
182
+ ]
183
+ },
184
+ {
185
+ "cell_type": "code",
186
+ "execution_count": null,
187
+ "metadata": {},
188
+ "outputs": [],
189
+ "source": [
190
+ "def make_grid(X,y,res):\n",
191
+ " y_min = y.min()-0.2\n",
192
+ " y_max = y.max()+0.2\n",
193
+ " x_min = X.min()-0.2\n",
194
+ " x_max = X.max()+0.2\n",
195
+ " x_arr = np.linspace(x_min,x_max,res)\n",
196
+ " y_arr = np.linspace(y_min,y_max,res)\n",
197
+ " xx,yy = np.meshgrid(x_arr,y_arr) \n",
198
+ " return xx,yy\n",
199
+ "x, y = make_grid(data[:,0],data[:,1],100)"
200
+ ]
201
+ }
202
+ ],
203
+ "metadata": {
204
+ "kernelspec": {
205
+ "display_name": "Python 3",
206
+ "language": "python",
207
+ "name": "python3"
208
+ },
209
+ "language_info": {
210
+ "codemirror_mode": {
211
+ "name": "ipython",
212
+ "version": 3
213
+ },
214
+ "file_extension": ".py",
215
+ "mimetype": "text/x-python",
216
+ "name": "python",
217
+ "nbconvert_exporter": "python",
218
+ "pygments_lexer": "ipython3",
219
+ "version": "3.6.8"
220
+ }
221
+ },
222
+ "nbformat": 4,
223
+ "nbformat_minor": 2
224
+ }
polire/natural_neighbors/__init__.py ADDED
File without changes
polire/natural_neighbors/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (153 Bytes). View file
 
polire/natural_neighbors/__pycache__/natural_neighbors.cpython-310.pyc ADDED
Binary file (6.75 kB). View file
 
polire/natural_neighbors/natural_neighbors.py ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This is a module for Natural Neighbors Interpolation
3
+ """
4
+
5
+ import numpy as np
6
+ from scipy.spatial import Voronoi, voronoi_plot_2d
7
+ import matplotlib.pyplot as plt
8
+ from ..base import Base
9
+ from shapely.geometry import Point
10
+ from shapely.geometry.polygon import Polygon
11
+ from math import atan2
12
+ from copy import deepcopy
13
+
14
+
15
def is_row_in_array(row, arr):
    """Return True when ``row`` occurs as a row of the 2D array ``arr``."""
    rows = arr.tolist()
    return list(row) in rows
17
+
18
+
19
def get_index(row, arr):
    """Return the index of the first row of ``arr`` that equals ``row``.

    Assumes ``row`` is present in ``arr``. If several rows match, only
    the first matching index is returned — uniqueness is not enforced
    here (an IndexError is raised only when there is no match at all).
    """
    matches_x = np.where(arr[:, 0] == row[0])
    matches_y = np.where(arr[:, 1] == row[1])
    both = np.intersect1d(matches_x, matches_y)
    return both[0]
25
+
26
+
27
def order_poly(vertices):
    """Order the vertices of a Voronoi polygon angularly (clockwise).

    Shapely builds a Polygon by walking the vertices in the given order,
    so an unordered vertex list can yield a self-intersecting,
    effectively zero-area shape. Sorting the vertices by their angle
    around the centroid guarantees a simple (convex-safe) boundary walk.

    Arguments
    ---------
    vertices : {array-like, 2D matrix}
        The list of polygon vertices to be sorted.

    Returns
    -------
    new_vertices : {array-like, 2D matrix}
        The same vertices reordered in a clockwise manner.
    """
    centroid_x = np.mean(vertices[:, 0])
    centroid_y = np.mean(vertices[:, 1])

    def angle_about_centroid(vertex):
        # Polar angle (in degrees) of the vertex measured from the +y
        # axis around the centroid; sorting by it walks the boundary
        # clockwise.
        return atan2(vertex[0] - centroid_x, vertex[1] - centroid_y) * 180 / np.pi

    return sorted(vertices, key=angle_about_centroid)
53
+
54
+
55
class NaturalNeighbor(Base):
    """Class used for natural neighbors interpolation. This method is an implementation first
    proposed by Sibson et al. [1] in 1981. We use the weights derived using the work in [1]
    and leave it for future addition, the use of Laplace Weights [2].

    Parameters
    ----------
    weights: str, optional
        This defines the type of weights to be used for natural neighbor interpolation.
        We use Sibson Weights, and plan to add Laplace weights in the future.
        Default value is "sibson".

    display: Boolean, optional
        True value displays the voronoi tesselation to the user after fitting the model.
        Default value is False.

    Notes
    -----
    This is for contributors:
    Part of the code relies on the assumption that the data's ordering
    is what links each data point to its Voronoi partition.

    References
    ----------
    [1] Sibson, R. (1981). "A brief description of natural neighbor interpolation (Chapter 2)". In V. Barnett (ed.). Interpolating Multivariate Data. Chichester: John Wiley. pp. 21–36.
    [2] V.V. Belikov; V.D. Ivanov; V.K. Kontorovich; S.A. Korytnik; A.Y. Semenov (1997). "The non-Sibsonian interpolation: A new method of interpolation of the values of a function on an arbitrary set of points". Computational mathematics and mathematical physics. 37 (1): 9–15.
    [3] N.H. Christ; R. Friedberg, R.; T.D. Lee (1982). "Weights of links and plaquettes in a random lattice". Nuclear Physics B. 210 (3): 337–346.
    """

    def __init__(
        self,
        weights="sibson",
        display=False,
        resolution="standard",
        coordinate_type="Eucledian",
    ):
        super().__init__(resolution, coordinate_type)
        self.weights = weights
        # Training inputs/targets, captured by _fit.
        self.X = None
        self.y = None
        self.result = None
        # scipy.spatial.Voronoi tesselation of the training points.
        self.voronoi = None
        self.vertices = (
            None  # This variable stores the voronoi partition's vertices
        )
        self.vertex_poly_map = (
            dict()
        )  # This variable stores the polygon to data point map
        self.display = display

    def _fit(self, X, y):
        """This function is for the natural neighbors interpolation method.
        This is not expected to be called directly.

        Builds an incremental Voronoi tesselation over ``X`` and a map
        from each data-point index to its (bounded) Voronoi cell as a
        shapely Polygon. Returns ``self``.
        """
        self.X = X
        self.y = y
        # incremental=True so that _predict can insert query points later.
        self.voronoi = Voronoi(X, incremental=True)
        self.vertices = self.voronoi.vertices

        # Placeholder 0 marks "no bounded polygon yet" for every point.
        self.vertex_poly_map = {i: 0 for i in range(len(X))}

        for i in range(len(self.X)):
            # point_region maps point index -> region index; here we invert
            # it to find which data point owns region i.
            # NOTE(review): np.where(...)[0][0] raises IndexError when no
            # point maps to region i (Voronoi typically has one more region
            # than points) — confirm this cannot happen for these inputs.
            index = np.where(self.voronoi.point_region == i)[0][0]
            point = Point(self.X[index])
            region = self.voronoi.regions[i]
            if -1 not in region and region != []:
                # -1 corresponds to unbounded region - we can't have this in interpolation
                # and the function returns an empty list anyways
                # at least in the case of non-incremental NN
                p = Polygon(order_poly(self.vertices[region]))
                self.vertex_poly_map[index] = p
        # Remove all the data points that do not contribute to Natural Neighbor
        # interpolation (those still holding the placeholder 0).
        for i in range(len(self.vertex_poly_map)):
            if self.vertex_poly_map[i] == 0:
                self.vertex_poly_map.pop(i, None)

        if self.display:
            voronoi_plot_2d(self.voronoi)
            plt.show()
            # Only show the plot once, even if _fit is re-run by _predict.
            self.display = False

        return self

    def _predict_grid(self, x1lim, x2lim):
        """Gridded interpolation for natural neighbors interpolation. This function should not
        be called directly.

        Builds a ``resolution x resolution`` mesh over the given limits
        and delegates to the pointwise ``_predict``.
        """
        lims = (*x1lim, *x2lim)
        x1min, x1max, x2min, x2max = lims
        x1 = np.linspace(x1min, x1max, self.resolution)
        x2 = np.linspace(x2min, x2max, self.resolution)
        X1, X2 = np.meshgrid(x1, x2)
        return self._predict(np.array([X1.ravel(), X2.ravel()]).T)

    def _predict(self, X):
        """The function that is called to predict the interpolated data in Natural Neighbors
        interpolation. This should not be called directly.

        For each query point: if it coincides with a training point the
        stored target is returned; otherwise the point is inserted into
        the Voronoi tesselation and its Sibson weights are computed as
        the fractional areas its new cell steals from the existing cells.
        Entries that cannot be interpolated (fewer than three new Voronoi
        vertices) are returned as NaN.
        """
        result = np.zeros(len(X))
        # Potentially create as many class objects as the
        # length of the to-be-predicted array;
        # not a bad idea if memory is not a constraint.
        for index in range(len(X)):
            if is_row_in_array(X[index], self.X):
                idx = get_index(X[index], self.X)
                # Check if query data point already exists
                result[index] = self.y[idx]

            else:
                # QHull objects can't be pickled. Deepcopy doesn't work.
                # So we need to fit the model for each and every query data point.
                self._fit(self.X, self.y)

                vor = self.voronoi
                vor.add_points(np.array([X[index]]))
                vor.close()
                # We exploit the incremental processing of Scipy's Voronoi.
                # We create a copy to ensure that the original copy is preserved.
                new_regions = vor.regions
                new_vertices = vor.vertices
                final_regions = []

                for i in new_regions:
                    if i != [] and -1 not in i:
                        final_regions.append(i)

                new = []  # this stores the newly created voronoi partitions
                for i in range(len(new_vertices)):
                    # NOTE(review): `in` on a 2D ndarray does elementwise
                    # membership, not row membership — confirm this picks
                    # out exactly the vertices added by the query point.
                    if new_vertices[i] not in self.vertices:
                        new.append(new_vertices[i])
                new = np.array(new)
                if len(new) < 3:
                    # We need at least a triangle to interpolate;
                    # three new voronoi vertices form a triangle.
                    result[index] = np.nan
                    continue

                weights = {}  # Weights that we use for interpolation
                new_polygon = Polygon(order_poly(new))
                new_polygon_area = new_polygon.area

                # Sibson weight of neighbor i = area stolen from cell i
                # by the query point's new cell, normalized by the new
                # cell's total area.
                for i in self.vertex_poly_map:
                    if new_polygon.intersects(self.vertex_poly_map[i]):
                        weights[i] = (
                            new_polygon.intersection(self.vertex_poly_map[i])
                        ).area / new_polygon_area

                prediction = np.array(
                    [self.y[i] * weights[i] for i in weights]
                ).sum()
                result[index] = prediction
                del vor, weights, new_polygon, new_polygon_area

        return result
polire/nsgp/__init__.py ADDED
File without changes
polire/nsgp/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (140 Bytes). View file
 
polire/nsgp/__pycache__/nsgp.cpython-310.pyc ADDED
Binary file (6.8 kB). View file